@@ -1,6 +1,9 @@
 #!/usr/bin/env python2
 from __future__ import print_function
 
+# config
+LAST_SEEN_DAYS = 7
+
 # prepare environment
 import sys
 sys.path.append("..")
@@ -21,6 +24,8 @@ def getOrCreateAS(crawl, number, online=True):
 	currAS = None
 	try:
 		currAS = AS.objects.get(crawl=crawl, number=number)
+		if online:
+			currAS.setOnline()
 	except AS.DoesNotExist:
 		currAS = AS(crawl=crawl, number=number, online=online)
 		currAS.save()
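Note on this hunk (not part of the patch itself): an AS that is already present in the crawl is now flipped to online via setOnline() instead of being left untouched. A minimal usage sketch, assuming getOrCreateAS() returns currAS (as its call site in main() further down suggests) and using a hypothetical AS number:

    neighAS = getOrCreateAS(crawl, 64511)                # missing -> created with online=True (default)
    neighAS = getOrCreateAS(crawl, 64511, online=False)  # already present -> returned unchanged
    neighAS = getOrCreateAS(crawl, 64511, online=True)   # already present -> marked online via setOnline()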
@@ -76,12 +81,7 @@ def main():
 
 			# a) find/create neighbor
 			print(" ----> Peer:", int(peer["BGP"]["neighbor_as"]))
-			neighAS = None
-			try:
-				neighAS = AS.objects.get(crawl=crawl, number=int(peer["BGP"]["neighbor_as"]))
-			except AS.DoesNotExist:
-				neighAS = AS(crawl=crawl, number=int(peer["BGP"]["neighbor_as"]))
-				neighAS.save()
+			neighAS = getOrCreateAS(crawl, int(peer["BGP"]["neighbor_as"]), online=peer["BGP"]["online"])
 
 			# b) find out if a peering already exists (maybe where we only need to add our router id?)
 			peering = None
@@ -155,8 +155,8 @@ def main():
 	# 3.2 add ASses, routers and peerings from old crawlruns (last should suffice)
 	# find
 	print(" --> copy old ASses")
-	timerangeStart = crawl.startTime - datetime.timedelta(7)
-	oldASses = AS.objects.filter(crawl__startTime__gte=timerangeStart).values("number").annotate(lastSeen=Max('crawl_id')).filter(~Q(lastSeen=crawl.pk))
+	timerangeStart = crawl.startTime - datetime.timedelta(LAST_SEEN_DAYS)
+	oldASses = AS.objects.filter(online=True, crawl__startTime__gte=timerangeStart).values("number").annotate(lastSeen=Max('crawl_id')).filter(~Q(lastSeen=crawl.pk))
 
 	# 3.2.1. copy old asses
 	print(" ----> create ASses")
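A reading of the rewritten query, as a sketch only; it assumes the imports the script already relies on (datetime, plus Max and Q from django.db.models) and the crawl object and AS model used throughout:

    timerangeStart = crawl.startTime - datetime.timedelta(LAST_SEEN_DAYS)
    oldASses = (AS.objects
        .filter(online=True, crawl__startTime__gte=timerangeStart)  # only ASes seen online in the window
        .values("number")                                           # group by AS number
        .annotate(lastSeen=Max('crawl_id'))                         # newest crawl id containing each AS
        .filter(~Q(lastSeen=crawl.pk)))                             # skip ASes already part of this crawl

Compared to the old line, only ASes that were online get copied forward, and the cut-off comes from the LAST_SEEN_DAYS setting introduced above instead of a hard-coded 7.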
@@ -164,8 +164,14 @@ def main():
 		print(" ------> AS", oldASdata["number"])
 		oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
 
-		newAS = AS(number=oldAS.number, crawl=crawl, lastSeen=oldAS.crawl, directlyCrawled=False, online=False)
-		newAS.save()
+		try:
+			newAS = AS.objects.get(number=oldAS.number, crawl=crawl)
+			if not newAS.online and not newAS.lastSeen:
+				newAS.lastSeen = oldAS.crawl
+				newAS.save()
+		except:
+			newAS = AS(number=oldAS.number, crawl=crawl, lastSeen=oldAS.crawl, directlyCrawled=False, online=False)
+			newAS.save()
 
 	# 3.2.2 copy peerings between old asses
 	print(" ----> copy peerings")
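The replacement block is a get-or-create-then-update pattern: reuse the AS row the current crawl may already have and only backfill lastSeen on an offline row, otherwise create the offline copy exactly as before (note that the bare except: also swallows errors other than AS.DoesNotExist). For comparison only, not what the patch does, the same intent could be written with Django's built-in get_or_create(); field names are taken from the hunk above:

    newAS, created = AS.objects.get_or_create(
        number=oldAS.number, crawl=crawl,
        defaults={"lastSeen": oldAS.crawl, "directlyCrawled": False, "online": False})
    if not created and not newAS.online and not newAS.lastSeen:
        newAS.lastSeen = oldAS.crawl  # backfill lastSeen on an existing offline row
        newAS.save()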