Show peers as offline, if they are

parent ff61388de5
commit 1326773a55
@@ -69,9 +69,18 @@ class AS(models.Model):
     online = models.BooleanField(default=True)
     lastSeen = models.ForeignKey(CrawlRun, blank=True, null=True, default=None, related_name='as_lastseen')
 
+    class Meta:
+        unique_together = (('crawl', 'number'),)
+
     def __unicode__(self):
         return u"AS %s (crawl %d)" % (self.number, self.crawl.pk)
 
+    def setOnline(self):
+        if not self.online:
+            self.online = True
+            self.lastSeen = None
+            self.save()
+
     def getPeerings(self):
         return Peering.objects.filter(Q(as1=self)|Q(as2=self))
 
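The model now tracks both an online flag and a lastSeen crawl, and the new setOnline() clears lastSeen once an AS is observed alive again. Purely for illustration, a minimal sketch of the opposite transition; the markOffline helper below is hypothetical and not part of this commit, it only assumes the AS fields shown in the hunk above:

# Hypothetical helper -- not part of this commit. Assumes the AS model fields
# above (online, lastSeen) and a CrawlRun instance last_crawl.
def markOffline(as_obj, last_crawl):
    """Flag an AS as offline and remember the crawl in which it was last seen."""
    if as_obj.online:
        as_obj.online = False
        as_obj.lastSeen = last_crawl
        as_obj.save()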
bin/crawl.py (26 changed lines)
@@ -1,6 +1,9 @@
 #!/usr/bin/env python2
 from __future__ import print_function
 
+# config
+LAST_SEEN_DAYS = 7
+
 # prepare environment
 import sys
 sys.path.append("..")
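The new LAST_SEEN_DAYS constant feeds the look-back window used further down in the script (see the timedelta change below). A small self-contained example of the arithmetic, with a hypothetical crawl start time:

from __future__ import print_function
import datetime

LAST_SEEN_DAYS = 7
startTime = datetime.datetime(2015, 1, 8, 12, 0)              # hypothetical crawl start
timerangeStart = startTime - datetime.timedelta(LAST_SEEN_DAYS)
print(timerangeStart)                                         # 2015-01-01 12:00:00
# Only ASses from crawls newer than timerangeStart count as "recently seen".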
@@ -21,6 +24,8 @@ def getOrCreateAS(crawl, number, online=True):
     currAS = None
     try:
         currAS = AS.objects.get(crawl=crawl, number=number)
+        if online:
+            currAS.setOnline()
     except AS.DoesNotExist:
         currAS = AS(crawl=crawl, number=number, online=online)
         currAS.save()
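Pieced together from this hunk, getOrCreateAS reads roughly as follows after the change; the trailing return is not visible in the diff but is implied by the call site below:

def getOrCreateAS(crawl, number, online=True):
    # Reuse an existing AS for this crawl if possible, marking it online again
    # when the current peer reports the session as up; otherwise create it.
    currAS = None
    try:
        currAS = AS.objects.get(crawl=crawl, number=number)
        if online:
            currAS.setOnline()
    except AS.DoesNotExist:
        currAS = AS(crawl=crawl, number=number, online=online)
        currAS.save()
    return currAS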
@@ -76,12 +81,7 @@ def main():
 
             # a) find/create neighbor
             print(" ----> Peer:", int(peer["BGP"]["neighbor_as"]))
-            neighAS = None
-            try:
-                neighAS = AS.objects.get(crawl=crawl, number=int(peer["BGP"]["neighbor_as"]))
-            except AS.DoesNotExist:
-                neighAS = AS(crawl=crawl, number=int(peer["BGP"]["neighbor_as"]))
-                neighAS.save()
+            neighAS = getOrCreateAS(crawl, int(peer["BGP"]["neighbor_as"]), online=peer["BGP"]["online"])
 
             # b) find out if a peering already exists (maybe where we only need to add our router id?)
             peering = None
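For illustration, a hypothetical peer entry of the shape the protocol parsers below now produce, and the resulting call; all values are invented:

peer = {
    "BGP": {
        "state": "Established",
        "online": True,                      # new key added by this commit
        "neighbor_as": 64512,                # invented example values
        "neighbor_address": "172.22.0.1",
        "neighbor_id": "172.22.0.1",
    },
}
# crawl is the current CrawlRun
neighAS = getOrCreateAS(crawl, int(peer["BGP"]["neighbor_as"]),
                        online=peer["BGP"]["online"])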
@@ -155,8 +155,8 @@ def main():
     # 3.2 add ASses, routers and peerings from old crawlruns (last should suffice)
     # find
     print(" --> copy old ASses")
-    timerangeStart = crawl.startTime - datetime.timedelta(7)
-    oldASses = AS.objects.filter(crawl__startTime__gte=timerangeStart).values("number").annotate(lastSeen=Max('crawl_id')).filter(~Q(lastSeen=crawl.pk))
+    timerangeStart = crawl.startTime - datetime.timedelta(LAST_SEEN_DAYS)
+    oldASses = AS.objects.filter(online=True, crawl__startTime__gte=timerangeStart).values("number").annotate(lastSeen=Max('crawl_id')).filter(~Q(lastSeen=crawl.pk))
 
     # 3.2.1. copy old asses
     print(" ----> create ASses")
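Because of values()/annotate(), iterating oldASses yields plain dictionaries rather than AS instances; each pairs an AS number with the id of the newest crawl in the window that saw it, now restricted to ASses that were online. A sketch of the shape, with invented values:

# Roughly what iterating oldASses returns (values invented):
#   {'number': 64512, 'lastSeen': 41}
#   {'number': 64513, 'lastSeen': 40}
for oldASdata in oldASses:
    print(oldASdata["number"], "last seen online in crawl", oldASdata["lastSeen"])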
@@ -164,8 +164,14 @@ def main():
         print(" ------> AS", oldASdata["number"])
         oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
 
-        newAS = AS(number=oldAS.number, crawl=crawl, lastSeen=oldAS.crawl, directlyCrawled=False, online=False)
-        newAS.save()
+        try:
+            newAS = AS.objects.get(number=oldAS.number, crawl=crawl)
+            if not newAS.online and not newAS.lastSeen:
+                newAS.lastSeen = oldAS.crawl
+                newAS.save()
+        except:
+            newAS = AS(number=oldAS.number, crawl=crawl, lastSeen=oldAS.crawl, directlyCrawled=False, online=False)
+            newAS.save()
 
     # 3.2.2 copy peerings between old asses
     print(" ----> copy peerings")
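The block above first looks the AS up and only creates it when the lookup fails (the bare except effectively catches AS.DoesNotExist). Django's get_or_create expresses the same look-up-or-insert step more compactly; the following is only an alternative sketch under that assumption, not what the commit actually does:

# Alternative sketch only -- the commit itself uses an explicit try/except.
newAS, created = AS.objects.get_or_create(
    number=oldAS.number, crawl=crawl,
    defaults={"lastSeen": oldAS.crawl, "directlyCrawled": False, "online": False},
)
if not created and not newAS.online and not newAS.lastSeen:
    # AS row already exists but was never seen online: record when it was last seen.
    newAS.lastSeen = oldAS.crawl
    newAS.save()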
@@ -151,6 +151,7 @@ def _birdMakeProtocols(info):
             found = True
             protoInfo["BGP"] = {
                 "state": data[n][2],
+                "online": data[n][2] == "Established",
                 "neighbor_address": data[n+1][2],
                 "neighbor_as": int(data[n+2][2]),
                 "neighbor_id": data[n+3][2] if data[n+3][0:2] == ["Neighbor", "ID:"] else None,
@@ -267,6 +268,7 @@ def _quaggaFindNeighbors(info):
             },
             "BGP": {
                 "state": raw[2+descrIdx][3].strip(","),
+                "online": raw[2+descrIdx][3].strip(",") == "Established",
                 "neighbor_id": raw[1+descrIdx][6].strip(","),
                 "neighbor_address": raw[0][3].rstrip(","),
                 "neighbor_as": int(raw[0][6].rstrip(",")),
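Both parser changes derive the new online flag the same way: the BGP session counts as up only when its state string is exactly "Established". A minimal self-contained illustration (the helper name and sample states are just examples):

def isOnline(state):
    # BGP FSM states other than Established (Idle, Connect, Active, OpenSent,
    # OpenConfirm) are treated as offline, exactly like the parsers above.
    return state == "Established"

print(isOnline("Established"))  # True
print(isOnline("Active"))       # False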