Browse Source

Migrate from tabs to spaces

Bow to the python best practices...
master
Sebastian Lohff 2 years ago
parent
commit
525d8a724d
  1. 46
      bgpdata/api.py
  2. 300
      bgpdata/models.py
  3. 16
      bgpdata/urls.py
  4. 22
      bgpdata/views.py
  5. 370
      bin/crawl.py
  6. 588
      bin/routerparsers.py
  7. 8
      dnmapper/settings.default.py
  8. 6
      dnmapper/settings.py
  9. 6
      dnmapper/urls.py

46
bgpdata/api.py

@ -7,34 +7,34 @@ from tastypie import fields @@ -7,34 +7,34 @@ from tastypie import fields
from bgpdata.models import AS, CrawlRun, Announcement, BorderRouter
class ASResource(ModelResource):
    """Read-only tastypie API resource for AS objects, filterable by crawl and number."""
    crawl = fields.ForeignKey("bgpdata.api.CrawlResource", "crawl")

    class Meta:
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        filtering = {'crawl': ALL_WITH_RELATIONS, 'number': ALL}
        queryset = AS.objects.all()
        resource_name = "as"
class CrawlResource(ModelResource):
    """Tastypie API resource exposing crawl runs."""

    class Meta:
        queryset = CrawlRun.objects.all()
        resource_name = "crawl"
class BorderRouterResource(ModelResource):
    """Read-only tastypie API resource for border routers, filterable by AS."""
    AS = fields.ForeignKey("bgpdata.api.ASResource", "AS")

    class Meta:
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        filtering = {'AS': ALL_WITH_RELATIONS}
        queryset = BorderRouter.objects.all()
        resource_name = "borderrouter"
class AnnouncementResource(ModelResource):
    """Read-only tastypie API resource for BGP announcements."""
    router = fields.ForeignKey("bgpdata.api.BorderRouterResource", "router")

    class Meta:
        list_allowed_methods = ['get']
        detail_allowed_methods = ['get']
        filtering = {'originAS': ALL_WITH_RELATIONS, 'crawlAS': ALL_WITH_RELATIONS, 'router': ALL_WITH_RELATIONS}
        queryset = Announcement.objects.all()
        resource_name = "announcement"

300
bgpdata/models.py

@ -8,185 +8,185 @@ from django.db.models import Q @@ -8,185 +8,185 @@ from django.db.models import Q
# Create your models here.
class ConfigHost(models.Model):
    """A router host configured for crawling (name, AS number, IP, check method)."""
    CHECK_CHOICES = (
        ('CMK', "Check MK"),
        ('PLAIN', "Plain"),
    )

    # asno, ip, check method,
    name = models.CharField(max_length=50)
    number = models.IntegerField()
    ip = models.GenericIPAddressField()
    checkMethod = models.CharField(max_length=4, choices=CHECK_CHOICES)

    def __unicode__(self):
        return u"%s (%s / %s)" % (self.name, self.number, self.ip)
class CrawlRun(models.Model):
    """One crawl of all configured hosts; caches aggregate counters for display."""
    # time start, time end,
    startTime = models.DateTimeField()
    endTime = models.DateTimeField(null=True, blank=True)
    # NOTE(review): null=True has no effect on ManyToManyField in Django — harmless, kept for schema stability
    hostsCrawled = models.ManyToManyField(ConfigHost, null=True, blank=True)

    # denormalized counters, filled in at the end of a crawl (see bin/crawl.py)
    asCount = models.IntegerField(default=0)
    asOnlineCount = models.IntegerField(default=0)
    asOfflineCount = models.IntegerField(default=0)
    peeringCount = models.IntegerField(default=0)

    def __unicode__(self):
        return u"Run %d - %s to %s" % (self.pk, self.startTime, self.endTime if self.endTime else "?")

    def countAS(self):
        return self.asCount

    def countASOnline(self):
        return self.asOnlineCount

    def countASOffline(self):
        return self.asOfflineCount
        #return self.as_set.filter(online=False).count()

    def countPeerings(self):
        return self.peeringCount
        #return Peering.objects.filter(Q(as1__crawl=self)|Q(as2__crawl=self)).count()
class CrawlLog(models.Model):
    """Log entry attached to a crawl run, optionally tied to a specific host."""
    INFO = 'INFO'
    ERROR = 'ERROR'
    DEBUG = 'DEBUG'
    WARN = 'WARN'
    SEVERITY = (
        (INFO, 'info'),
        (ERROR, 'error'),
        (DEBUG, 'debug'),
        (WARN, 'warning'),
    )

    crawl = models.ForeignKey(CrawlRun)
    host = models.ForeignKey(ConfigHost, null=True, blank=True, on_delete=models.SET_NULL)
    logtime = models.DateTimeField(auto_now_add=True)
    severity = models.CharField(max_length=10, choices=SEVERITY)
    message = models.TextField()

    @staticmethod
    def log(crawl, msg, severity=None, host=None):
        """Convenience constructor: create and save a log entry (defaults to ERROR)."""
        if not severity:
            severity = CrawlLog.ERROR

        log = CrawlLog()
        log.crawl = crawl
        log.message = msg
        log.severity = severity
        log.host = host
        log.save()

    def __unicode__(self):
        host = "host %s - " % self.host.name if self.host else ""
        return u"Log %s %s: %s%s" % (self.get_severity_display(), self.logtime, host, self.message)
class AS(models.Model):
    """An autonomous system as observed in one crawl run."""
    # asno
    crawl = models.ForeignKey(CrawlRun)
    number = models.IntegerField(db_index=True)

    directlyCrawled = models.BooleanField(default=False)
    online = models.BooleanField(default=True, db_index=True)
    # crawl in which this AS was last seen online (only set while offline)
    lastSeen = models.ForeignKey(CrawlRun, blank=True, null=True, default=None, related_name='as_lastseen')

    class Meta:
        unique_together = (('crawl', 'number'),)
        index_together = (
            ('crawl', 'number'),
        )

    def __unicode__(self):
        return u"AS %s (crawl %d)" % (self.number, self.crawl.pk)

    def setOnline(self):
        """Mark the AS online and clear the lastSeen marker (saves only on change)."""
        if not self.online:
            self.online = True
            self.lastSeen = None
            self.save()

    def getPeerings(self):
        """All peerings in which this AS participates, on either side."""
        return Peering.objects.filter(Q(as1=self)|Q(as2=self))

    def getAnnouncedPrefixes(self):
        """Distinct "ip/prefix" strings announced by this AS."""
        return list(set(map(lambda _x: "%(ip)s/%(prefix)s" % _x, self.announcement_set.all().values('ip', 'prefix'))))

    def formatLastSeen(self):
        # NOTE(review): "%H:%I" mixes 24h hour with 12h hour — probably "%H:%M" was intended; confirm before changing
        if self.lastSeen:
            return self.lastSeen.startTime.strftime("%d.%m.%Y %H:%I")
class BorderRouter(models.Model):
    """A BGP speaker inside an AS, identified by its router ID."""
    # as id, ip, check method, pingable, reachable
    # unique: (crawl_id, asno, as id)
    AS = models.ForeignKey(AS)
    routerID = models.GenericIPAddressField()
    pingable = models.BooleanField(default=False)
    reachable = models.BooleanField(default=False)

    def __unicode__(self):
        p = "p" if self.pingable else "!p"
        r = "r" if self.reachable else "!r"
        return u"Router %s (AS %s, %s%s)" % (self.routerID, self.AS.number, p, r)
class Announcement(models.Model):
    """A prefix announcement seen on a border router, with its AS path."""
    router = models.ForeignKey(BorderRouter)

    ip = models.GenericIPAddressField()
    prefix = models.IntegerField()

    # NOTE: increase length for longer pathes (currently supports a length of ~85)
    ASPath = models.CharField(max_length=512)
    nextHop = models.GenericIPAddressField()
    originAS = models.ForeignKey(AS, null=True)
    crawlAS = models.ForeignKey(AS, related_name='crawl_as', null=True)

    def __unicode__(self):
        return u"%s/%s via %s (crawl %s)" % (self.ip, self.prefix, self.ASPath, self.router.AS.crawl.pk)
class Peering(models.Model):
    """A peering between two ASses, either directly observed or derived from a BGP path."""
    DIRECT = 'direct'
    PATH = 'path'
    ORIGIN = (
        (PATH, 'BGP Path'),
        (DIRECT, 'Direct Connection'),
    )

    index_together = (
        ('as1', 'as2'),
    )

    as1 = models.ForeignKey(AS, related_name='peering1')
    as2 = models.ForeignKey(AS, related_name='peering2')
    origin = models.CharField(max_length=10, choices=ORIGIN)

    def __unicode__(self):
        return u"AS %s <--> AS %s (%s, crawl %s)" % (self.as1.number, self.as2.number, self.get_origin_display(), self.as1.crawl.pk)

    def containsAS(self, AS):
        """True if the given AS is one of the two endpoints."""
        return AS in (self.as1, self.as2)

    @staticmethod
    def getPeering(as1, as2):
        """ Find matching peering """
        # peerings are undirected; check both orderings (second lookup may raise DoesNotExist)
        try:
            return Peering.objects.get(as1=as1, as2=as2)
        except Peering.DoesNotExist:
            return Peering.objects.get(as1=as2, as2=as1)
class BorderRouterPair(models.Model):
    """The concrete router pair implementing a peering; either side may be unknown (None)."""
    peering = models.ForeignKey(Peering)
    router1 = models.ForeignKey(BorderRouter, default=None, blank=True, null=True, related_name='routerpair1')
    router2 = models.ForeignKey(BorderRouter, default=None, blank=True, null=True, related_name='routerpair2')

    def __unicode__(self):
        return u"%s <--> %s (crawl %d)" % (self.router1, self.router2, self.router1.AS.crawl.pk)

    @staticmethod
    def getPairing(peering, router1, router2):
        """Find the pairing for (router1, router2) in either order; raises DoesNotExist."""
        try:
            return BorderRouterPair.objects.get(peering=peering, router1=router1, router2=router2)
        except BorderRouterPair.DoesNotExist:
            return BorderRouterPair.objects.get(peering=peering, router1=router2, router2=router1)

16
bgpdata/urls.py

@ -13,14 +13,14 @@ borderRouterResource = BorderRouterResource() @@ -13,14 +13,14 @@ borderRouterResource = BorderRouterResource()
announcementResource = AnnouncementResource()

urlpatterns = (
    url(r'^$', bgpdata_views.overview),
    url(r'^([0-9]+)/$', bgpdata_views.showMap),

    #url(r'^api/crawl/(?P<crawlID>\d+)/asses/$', 'bgpdata.api.asses'),
    #(r'^api/', include(asResource.urls)),
    # each resource registers its own prefix under /api/
    url(r'^api/', include(asResource.urls)),
    url(r'^api/', include(crawlResource.urls)),
    url(r'^api/', include(borderRouterResource.urls)),
    url(r'^api/', include(announcementResource.urls)),
)

22
bgpdata/views.py

@ -7,18 +7,18 @@ from bgpdata.models import CrawlRun, AS, Peering @@ -7,18 +7,18 @@ from bgpdata.models import CrawlRun, AS, Peering
from django.core.paginator import Paginator
def overview(request):
    """Render the first page of crawl runs, newest first (200 per page)."""
    crawls = CrawlRun.objects.order_by("-startTime")
    crawlsPage = Paginator(crawls, 200)

    return render(request, 'bgpdata/overview.html', {"crawls": crawlsPage.page(1)})
def showMap(request, crawlId):
    """Render the AS map for one crawl run, or a not-found page for unknown ids."""
    try:
        crawl = CrawlRun.objects.get(id=crawlId)
    except CrawlRun.DoesNotExist:
        return render(request, "bgpdata/no-map-found.html", {"crawl_id": crawlId})

    ASses = AS.objects.filter(crawl=crawl)
    peerings = Peering.objects.filter(as1__crawl=crawl)

    return render(request, 'bgpdata/map.html', {"crawl": crawl, 'ASses': ASses, 'peerings': peerings})

370
bin/crawl.py

@ -26,193 +26,193 @@ from routerparsers import getBGPData, RouterParserException @@ -26,193 +26,193 @@ from routerparsers import getBGPData, RouterParserException
def getOrCreateAS(crawl, number, online=True):
    """Fetch the AS (crawl, number), creating it if missing.

    An existing AS is flipped to online when online=True; a new AS is
    created with the given online state.
    """
    try:
        currAS = AS.objects.get(crawl=crawl, number=number)
        if online:
            currAS.setOnline()
    except AS.DoesNotExist:
        currAS = AS(crawl=crawl, number=number, online=online)
        currAS.save()

    return currAS
def main():
    """Run one complete crawl: fetch data from all hosts, store it, derive extra data."""
    # 1. create crawl run
    crawl = CrawlRun()
    crawl.startTime = timezone.now()
    crawl.save()

    CrawlLog.log(crawl, "Starting crawl run!", severity=CrawlLog.INFO)

    # 2. get data from all hosts, put it in the database
    for host in ConfigHost.objects.all():
        crawl.hostsCrawled.add(host)

        data = None
        print(" -- Getting data for host %s" % host)
        try:
            if host.checkMethod == 'CMK':
                data = getBGPData(host.ip, host.number)
            else:
                CrawlLog.log(crawl, "Method %s is not currently supported, skipping host" % host.checkMethod, host=host, severity=CrawlLog.ERROR)
                continue
        except RouterParserException as e:
            msg = "Could not parse data for host: %s" % str(e)
            print("%s: %s" % (host, msg))
            CrawlLog.log(crawl, msg, host=host, severity=CrawlLog.ERROR)
            continue
        except socket.error as e:
            msg = "Could not reach host: %s" % (e,)
            print("%s: %s" % (host, msg))
            CrawlLog.log(crawl, msg, host=host, severity=CrawlLog.ERROR)
            continue

        print(" -- parsing...")
        currASno = int(data["local_as"])
        currAS = getOrCreateAS(crawl, currASno)
        currAS.directlyCrawled = True
        currAS.save()

        try:
            currRouter = BorderRouter.objects.get(AS=currAS, routerID=data["local_id"])
            currRouter.pingable = True
            currRouter.reachable = True
            currRouter.save()
        except BorderRouter.DoesNotExist:
            currRouter = BorderRouter(AS=currAS, routerID=data["local_id"], pingable=True, reachable=True)
            currRouter.save()

        print(" --> peers")
        for peer in data["peers"]:
            # peerings
            # data: BGP{state, neighbor_id, neighbor_as}, description

            # a) find/create neighbor
            print(" ----> Peer:", int(peer["BGP"]["neighbor_as"]))
            neighAS = getOrCreateAS(crawl, int(peer["BGP"]["neighbor_as"]), online=peer["BGP"]["online"])

            # b) find out if a peering already exists (maybe where we only need to add our router id?)
            try:
                peering = Peering.getPeering(currAS, neighAS)
            except Peering.DoesNotExist:
                peering = Peering(as1=currAS, as2=neighAS, origin=Peering.DIRECT)
                peering.save()

            # c) look for router/peering pairs
            if peer["BGP"]["neighbor_id"]:
                try:
                    neighRouter = BorderRouter.objects.get(AS=neighAS, routerID=peer["BGP"]["neighbor_id"])
                except BorderRouter.DoesNotExist:
                    neighRouter = BorderRouter(AS=neighAS, routerID=peer["BGP"]["neighbor_id"], pingable=False, reachable=False)
                    neighRouter.save()

                try:
                    BorderRouterPair.getPairing(peering, currRouter, neighRouter)
                except BorderRouterPair.DoesNotExist:
                    # maybe a half-filled pair exists where only the neighbor side is known
                    pairs = BorderRouterPair.objects.filter(Q(peering=peering) & (Q(router1=neighRouter, router2=None)|Q(router1=None, router2=neighRouter)))
                    if pairs.count() > 0:
                        pair = pairs[0]
                        if pair.router1 == None:
                            pair.router1 = currRouter
                        else:
                            pair.router2 = currRouter
                        pair.save()
                    else:
                        pair = BorderRouterPair(peering=peering, router1=currRouter, router2=neighRouter)
                        pair.save()

        print(" --> Announcements")
        if "routes" in data and data["routes"]:
            for route in data["routes"]:
                print(" ---->", route["prefix"])
                if "/" not in route["prefix"]:
                    continue

                crawlAS = currAS
                # NOTE(review): announcements with an empty AS path are skipped
                # (route["path"][-1] would fail otherwise) — confirm intended
                if len(route["path"]) > 0:
                    crawlAS = getOrCreateAS(crawl, route["path"][0])
                    originAS = getOrCreateAS(crawl, route["path"][-1])

                    ip, prefix = route["prefix"].split("/")
                    a = Announcement(router=currRouter, ip=ip, prefix=prefix,
                                     ASPath=" ".join(route["path"]), nextHop=route["nexthop"],
                                     crawlAS=crawlAS, originAS=originAS)
                    a.save()
        else:
            print(" !! No routes found in host output")
            CrawlLog.log(crawl, "No routes found in host output (no bgp feed included?)", host=host, severity=CrawlLog.WARN)

    # 3. calculate missing data
    print(" -- Adding extra data from announcements...")

    # 3.1. use announcement data to find hidden peerings
    for announcement in Announcement.objects.filter(router__AS__crawl=crawl):
        path = announcement.ASPath.split(" ")
        if len(path) > 1:
            firstASno = path.pop(0)
            firstAS = getOrCreateAS(crawl, firstASno)
            while len(path) > 0:
                secondASno = path.pop(0)
                secondAS = getOrCreateAS(crawl, secondASno)
                try:
                    Peering.getPeering(firstAS, secondAS)
                except Peering.DoesNotExist:
                    peering = Peering(as1=firstAS, as2=secondAS, origin=Peering.PATH)
                    peering.save()
                firstAS = secondAS

    # 3.2 add ASses, routers and peerings from old crawlruns (last should suffice)
    print(" --> copy old ASses")
    timerangeStart = crawl.startTime - datetime.timedelta(LAST_SEEN_DAYS)
    oldASses = AS.objects.filter(online=True, crawl__startTime__gte=timerangeStart).values("number").annotate(lastSeen=Max('crawl_id')).filter(~Q(lastSeen=crawl.pk))

    # 3.2.1. copy old asses
    print(" ----> create ASses")
    for oldASdata in oldASses:
        print(" ------> AS", oldASdata["number"])
        oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
        try:
            newAS = AS.objects.get(number=oldAS.number, crawl=crawl)
            if not newAS.online and not newAS.lastSeen:
                newAS.lastSeen = oldAS.crawl
                newAS.save()
        except AS.DoesNotExist:  # was a bare except; only "not present yet" is expected here
            newAS = AS(number=oldAS.number, crawl=crawl, lastSeen=oldAS.crawl, directlyCrawled=False, online=False)
            newAS.save()

    # 3.2.2 copy peerings between old asses
    print(" ----> copy peerings")
    for oldASdata in oldASses:
        print(" ------> AS", oldASdata["number"])
        oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
        for peering in oldAS.getPeerings():
            print(" --------> Peering %s <--> %s" % (peering.as1.number, peering.as2.number))
            peering = Peering(
                as1=AS.objects.get(number=peering.as1.number, crawl=crawl),
                as2=AS.objects.get(number=peering.as2.number, crawl=crawl),
                origin=peering.origin)
            peering.save()

    # 3.3 FIXME: do we also want to have old peerings which do not exist anymore?

    # 4. end crawl run
    crawl.endTime = timezone.now()
    crawl.save()

    # additional data
    crawl.asCount = crawl.as_set.count()
    crawl.asOnlineCount = crawl.as_set.filter(online=True).count()
    crawl.asOfflineCount = crawl.as_set.filter(online=False).count()
    crawl.peeringCount = Peering.objects.filter(Q(as1__crawl=crawl)|Q(as2__crawl=crawl)).count()
    crawl.save()

    print(" !! Done")
    CrawlLog.log(crawl, "Crawl completed", severity=CrawlLog.INFO)
if __name__ == '__main__':
    main()

588
bin/routerparsers.py

@ -10,344 +10,344 @@ import socket @@ -10,344 +10,344 @@ import socket
from collections import OrderedDict
class RouterParserException(Exception):
    """Raised when router output (check_mk / quagga / bird) cannot be parsed."""
    pass
def err(msg):
    """Abort parsing with a RouterParserException carrying *msg*."""
    raise RouterParserException(msg)
def getBGPData(ip, asno):
    """Fetch and parse BGP state from a host; returns the parsed router dict with "ip" added.

    Raises RouterParserException on empty or unparsable responses.
    """
    rawData = getDataFromHost(ip)
    if not rawData:
        err("Could not get data from host (empty response)")

    router = parseBGPData(rawData, asno)
    router["ip"] = ip

    return router
def getDataFromHost(ip):
    """Read the raw check_mk agent output from *ip* (TCP port 6556, 5s timeout)."""
    socket.setdefaulttimeout(5)
    sock = socket.socket()
    sock.connect((ip, 6556))
    # the agent sends its full output and closes the connection
    f = sock.makefile()
    data = f.read()
    sock.close()

    return data
def parseBGPData(raw, asno):
    """Extract the <<<quagga>>> or <<<bird>>> section from check_mk output and parse it."""
    d = re.search(r"(?:^|\n)<<<(quagga|bird)>>>\n(.*?)(?:$|<<<[^\n]+>>>)", raw, re.DOTALL)
    if not d:
        err("Data not found in check mk output")

    # mkify: split into lines, then into whitespace-separated tokens, dropping empties
    raw = d.group(2).split("\n")
    arr = filter(lambda _z: _z, map(lambda _y: filter(lambda _x: _x, re.split(r"\s+", _y)), raw))

    # parse for bird/quagga
    if d.group(1) == "quagga":
        result = parseQuagga(arr, raw, asno)
    else:
        result = parseBird(arr, raw, asno)

    return result
def parseQuagga(data, raw, asno):
    """Parse quagga "show ip bgp sum" output into a router dict (id, AS, peers, routes)."""
    status = _quaggaFindCommand(data, "show ip bgp sum")

    # newer quagga prepends an address-family header line; skip it
    if status[0][0:3] == ['IPv4', 'Unicast', 'Summary:']:
        del(status[0])

    if status[0][0:3] != ['BGP', 'router', 'identifier']:
        print(status)
        err("Couldn't find router id in quagga output")

    peers = _quaggaFindNeighbors(data)
    if asno and int(asno) != int(status[0][7]):
        err("AS number (%s) does not match as number from quagga (%s)" % (asno, status[0][7]))

    routes = _quaggaFindRoutes(raw)

    return {"local_id": status[0][3].strip(","), "local_as": int(status[0][7]), "peers": peers, "routes": routes}
def parseBird(data, raw, asno):
    """Parse bird "show status"/protocol output into a router dict (id, AS, peers, routes)."""
    status = _birdFindTable(data, "show status")
    if status[2][0] != "1011-Router":
        err("Couldn't find router id in bird output")

    peers = filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data))

    # bird output does not carry the local AS number, so it must be configured
    if asno == None:
        err("Host is bird")
        # FIXME

    routes = _birdFindRoutes(data)

    return {"local_id": status[2][3], "local_as": int(asno), "peers": peers, "routes": routes}
def _birdFindTable(info, command):
""" find command output of a bird command, e.g. "show bgp neighbors" """
command = ["bird>"] + command.split(" ")
commandInfo = []
editNextLine = False
for line in info:
if not commandInfo:
if line == command:
commandInfo.append(line)
editNextLine = True
else:
if editNextLine:
editNextLine = False
commandInfo.append(line[1:])
elif line[0] == "bird>":
return commandInfo
else:
commandInfo.append(line)
return []
""" find command output of a bird command, e.g. "show bgp neighbors" """
command = ["bird>"] + command.split(" ")
commandInfo = []
editNextLine = False
for line in info:
if not commandInfo:
if line == command:
commandInfo.append(line)
editNextLine = True
else:
if editNextLine:
editNextLine = False
commandInfo.append(line[1:])
elif line[0] == "bird>":
return commandInfo
else:
commandInfo.append(line)
return []
def _birdFindProtocols(info):
    """Split "show protocols all" output into per-protocol line groups.

    Returns an OrderedDict mapping protocol name -> list of token lines,
    where the first line of each group is the protocol's summary line
    (with the "1002-" code stripped from the name token).
    """
    protocolTable = _birdFindTable(info, "show protocols all")
    protocols = OrderedDict()
    currProto = None
    # skip the echoed command line and the table header
    for line in protocolTable[2:]:
        if line[0][0:4] == "1002":
            # "1002-<name>" starts a new protocol block
            currProto = line[0][5:]
            protocols[currProto] = [[currProto] + line[1:]]
        elif currProto is None:
            err("No proto selected, couldn't parse line:", line)
        else:
            protocols[currProto].append(line)
    return protocols
def _birdMakeProtocols(info):
    """Parse bird's "show protocols all" output into a list of dicts.

    Each dict carries name/type/table/state/last_change/info/description,
    a "routes" sub-dict (imported/exported/preferred counters) and -- for
    BGP protocols -- a "BGP" sub-dict with session state and neighbor
    data.  "BGP" is None when no "BGP state:" line was found.
    """
    # proto: name, type, description, state (up/down?), up-since
    # routes imported, exported, preferred
    # also: routing stats (
    # bgp special stuff: state, neighbor (address, as, id) (id not available when down)
    #   state (established, active)
    #   if error, last error is available
    protocols = []
    # .items() instead of py2-only .iteritems(): same behavior, py3-safe
    for proto, data in _birdFindProtocols(info).items():
        protoInfo = {
            "name": proto,
            "type": data[0][1],
            "table": data[0][2],
            "state": data[0][3],
            "last_change": data[0][4],
            "info": " ".join(data[0][5:]),
            "description": " ".join(data[1][2:]),
            "routes": {
                # NOTE(review): assumes the route counters always sit on the
                # sixth line of a protocol block -- confirm against bird output
                "imported": data[5][1],
                "exported": data[5][3],
                "preferred": data[5][5],
            }
        }
        if protoInfo["type"] == "BGP":
            found = False
            for n, line in enumerate(data):
                if line[0:2] == ["BGP", "state:"]:
                    found = True
                    # lines n+1/n+2 carry neighbor address and AS; line n+3
                    # is either "Neighbor ID:" (session up) or "Last error:"
                    # (session down), hence the two mutually-exclusive guards
                    protoInfo["BGP"] = {
                        "state": data[n][2],
                        "online": data[n][2] == "Established",
                        "neighbor_address": data[n+1][2],
                        "neighbor_as": int(data[n+2][2]),
                        "neighbor_id": data[n+3][2] if len(data) > n+3 and data[n+3][0:2] == ["Neighbor", "ID:"] else None,
                        "last_error": " ".join(data[n+3][2:]) if len(data) > n+3 and data[n+3][0:2] == ["Last", "error:"] else None,
                    }
            if not found:
                protoInfo["BGP"] = None
        protocols.append(protoInfo)
    return protocols
def _birdFindRoutes(info):
    """Parse bird's "show route all" output into a list of route dicts.

    Returns None when no "show route all" output is present at all,
    otherwise a list of dicts with keys prefix, nexthop, path and iBGP.
    Only entries for which both BGP.next_hop and BGP.as_path attributes
    were seen are emitted; iBGP is initialized to None and not filled in
    here.
    """
    output = _birdFindTable(info, "show route all")
    if len(output) < 1:
        # no data found
        return None

    def handleCandidate(routes, candidate):
        # flush a parsed route, but only if all expected fields were seen
        if candidate:
            for key in ["path", "nexthop", "network", "iBGP"]:
                if key not in candidate:
                    return
            route = {"prefix": candidate["network"], "nexthop": candidate["nexthop"], "path": candidate["path"], "iBGP": candidate["iBGP"]}
            routes.append(route)

    routes = []
    candidate = None
    lastIP = None
    for line in output:
        if line[0].startswith("1007-"):
            # new route entry; flush the previous candidate first
            handleCandidate(routes, candidate)
            if line[0] != "1007-":
                # line carries its own network; a bare "1007-" token means
                # "same network as the previous entry", so lastIP is kept
                lastIP = line[0][5:]
            candidate = {"network": lastIP, "iBGP": None}
        elif candidate is not None:
            # attribute line: "1012-" prefixed lines shift the key by one
            if line[0] == "1012-":
                k, v = line[1], line[2:]
            else:
                k, v = line[0], line[1:]
            k = k.rstrip(":")
            if k == "BGP.next_hop":
                candidate["nexthop"] = v[0]
            elif k == "BGP.as_path":
                candidate["path"] = v
    # flush the final candidate
    handleCandidate(routes, candidate)
    return routes
def _quaggaFindCommand(info, cmd):
# ['core-frunde#', 'show', 'ip', 'bgp', 'sum']
# ['core-frunde#', 'show', 'ip', 'bgp', 'neighbors']
output = []
cmd = cmd.split(" ")
prompt = None
for line in info:
if line[1:] == cmd:
prompt = line[0]
elif line[0] == prompt:
# done
return output
elif prompt != None:
output.append(line)
err("Could not find command '%s' in output" % " ".join(cmd))
# ['core-frunde#', 'show', 'ip', 'bgp', 'sum']
# ['core-frunde#', 'show', 'ip', 'bgp', 'neighbors']
output = []
cmd = cmd.split(" ")
prompt = None
for line in info:
if line[1:] == cmd:
prompt = line[0]
elif line[0] == prompt:
# done
return output
elif prompt != None:
output.append(line)
err("Could not find command '%s' in output" % " ".join(cmd))
def _quaggaFindNeighbors(info):
#['BGP', 'neighbor', 'is', '10.50.1.2,', 'remote', 'AS', '65001,', 'local', 'AS', '65001,', 'internal', 'link']
output = _quaggaFindCommand(info, "show ip bgp neighbors")
start = ["BGP", "neighbor", "is"]
curr = None
rawNeighbors = []
for line in output:
if line[0:3] == start:
if curr:
rawNeighbors.append(curr)
curr = [line]
elif curr:
curr.append(line)
else:
err("Could not find start of neighbors")
if curr:
rawNeighbors.append(curr)
curr = None
neighbors = []
neighborDict = OrderedDict()
for raw in rawNeighbors:
descrIdx = 1 if raw[1][0] == "Description:" else 0
if raw[descrIdx + 1][0] == "Hostname:":
descrIdx += 1
peerdict = {
"neighbor_address": raw[0][3].rstrip(","),
"neighbor_as": int(raw[0][6].rstrip(",")),
"local_as": int(raw[0][9].rstrip(",")),
"description": " ".join(raw[1][1:]) if descrIdx else "No description",
"neighbor_id": raw[1+descrIdx][6].strip(","),
"state": raw[2+descrIdx][3].strip(","),
"routes": {
"imported": 0,
},
"BGP": {
"state": raw[2+descrIdx][3].strip(","),
"online": raw[2+descrIdx][3].strip(",") == "Established",
"neighbor_id": raw[1+descrIdx][6].strip(","),
"neighbor_address": raw[0][3].rstrip(","),
"neighbor_as": int(raw[0][6].rstrip(",")),
"state": raw[2+descrIdx][3].strip(","),
},
}