diff --git a/bgpdata/api.py b/bgpdata/api.py index c6b26ee..f14ccb3 100644 --- a/bgpdata/api.py +++ b/bgpdata/api.py @@ -32,5 +32,5 @@ class AnnouncementResource(ModelResource): class Meta: list_allowed_methods = ['get'] detail_allowed_methods = ['get'] - filtering = {'originAS': ALL_WITH_RELATIONS, 'router': ALL_WITH_RELATIONS} + filtering = {'originAS': ALL_WITH_RELATIONS, 'crawlAS': ALL_WITH_RELATIONS, 'router': ALL_WITH_RELATIONS} queryset = Announcement.objects.all() diff --git a/bgpdata/migrations/0015_auto_20170117_0034.py b/bgpdata/migrations/0015_auto_20170117_0034.py new file mode 100644 index 0000000..a0eb9c6 --- /dev/null +++ b/bgpdata/migrations/0015_auto_20170117_0034.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import models, migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('bgpdata', '0014_auto_20170113_2022'), + ] + + operations = [ + migrations.AddField( + model_name='announcement', + name='crawlAS', + field=models.ForeignKey(related_name='crawl_as', to='bgpdata.AS', null=True), + preserve_default=True, + ), + migrations.AlterField( + model_name='crawlrun', + name='asCount', + field=models.IntegerField(default=0), + preserve_default=True, + ), + migrations.AlterField( + model_name='crawlrun', + name='asOfflineCount', + field=models.IntegerField(default=0), + preserve_default=True, + ), + migrations.AlterField( + model_name='crawlrun', + name='asOnlineCount', + field=models.IntegerField(default=0), + preserve_default=True, + ), + migrations.AlterField( + model_name='crawlrun', + name='peeringCount', + field=models.IntegerField(default=0), + preserve_default=True, + ), + ] diff --git a/bgpdata/migrations/0016_auto_20170117_0103.py b/bgpdata/migrations/0016_auto_20170117_0103.py new file mode 100644 index 0000000..11cef4f --- /dev/null +++ b/bgpdata/migrations/0016_auto_20170117_0103.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +from __future__ import 
unicode_literals + +from django.db import models, migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('bgpdata', '0015_auto_20170117_0034'), + ] + + operations = [ + migrations.AlterField( + model_name='announcement', + name='originAS', + field=models.ForeignKey(to='bgpdata.AS', null=True), + preserve_default=True, + ), + ] diff --git a/bgpdata/models.py b/bgpdata/models.py index e5c4ff8..9002856 100644 --- a/bgpdata/models.py +++ b/bgpdata/models.py @@ -105,6 +105,9 @@ class AS(models.Model): def getPeerings(self): return Peering.objects.filter(Q(as1=self)|Q(as2=self)) + def getAnnouncedPrefixes(self): + return list(set(map(lambda _x: "%(ip)s/%(prefix)s" % _x, self.announcement_set.all().values('ip', 'prefix')))) + def formatLastSeen(self): if self.lastSeen: return self.lastSeen.startTime.strftime("%d.%m.%Y %H:%I") @@ -132,7 +135,8 @@ class Announcement(models.Model): # NOTE: increase length for longer pathes (currently supports a length of ~85) ASPath = models.CharField(max_length=512) nextHop = models.GenericIPAddressField() - originAS = models.ForeignKey(AS) + originAS = models.ForeignKey(AS, null=True) + crawlAS = models.ForeignKey(AS, related_name='crawl_as', null=True) def __unicode__(self): return u"%s/%s via %s (crawl %s)" % (self.ip, self.prefix, self.ASPath, self.router.AS.crawl.pk) diff --git a/bgpdata/templates/bgpdata/map.html b/bgpdata/templates/bgpdata/map.html index 20c58a5..ee5bfe9 100644 --- a/bgpdata/templates/bgpdata/map.html +++ b/bgpdata/templates/bgpdata/map.html @@ -25,7 +25,7 @@ var asdata = [ {% for AS in ASses %} - {id: {{AS.pk}}, nodetype: "AS", asnumber: {{AS.number}}, label: "{{AS.number}}", neighbors: {{AS.getPeerings.count}}, crawled: {%if AS.directlyCrawled%}true{%else%}false{%endif%}, online: {%if AS.online%}true{%else%}false{%endif%}, lastSeenDate: "{{AS.formatLastSeen}}", lastSeen: {%if AS.lastSeen%}{{AS.lastSeen.pk}}{%else%}null{%endif%}, dismiss: true}{%if not forloop.last%},{%endif%} + {id: 
{{AS.pk}}, nodetype: "AS", asnumber: {{AS.number}}, label: "{{AS.number}}", neighbors: {{AS.getPeerings.count}}, crawled: {%if AS.directlyCrawled%}true{%else%}false{%endif%}, online: {%if AS.online%}true{%else%}false{%endif%}, lastSeenDate: "{{AS.formatLastSeen}}", lastSeen: {%if AS.lastSeen%}{{AS.lastSeen.pk}}{%else%}null{%endif%}, dismiss: true, announcements: [{% for a in AS.getAnnouncedPrefixes %}"{{a}}"{%if not forloop.last%},{%endif%}{%endfor%}]}{%if not forloop.last%},{%endif%} {%endfor%} ]; @@ -172,6 +172,11 @@ $('svg g').tipsy({ if(d.crawled) { content += 'NoteDirectly crawled'; } + if(d.announcements.length > 0) { + content += 'Netw.'; + content += d.announcements.join(", "); + content += ''; + } content += ''; //(function(node) { diff --git a/bin/crawl.py b/bin/crawl.py index 672a372..0f66db9 100755 --- a/bin/crawl.py +++ b/bin/crawl.py @@ -127,13 +127,14 @@ def main(): if "/" not in route["prefix"]: continue - originAS = currAS + crawlAS = currAS if len(route["path"]) > 0: - originAS = getOrCreateAS(crawl, route["path"][0]) + crawlAS = getOrCreateAS(crawl, route["path"][0]) + originAS = getOrCreateAS(crawl, route["path"][-1]) ip, prefix = route["prefix"].split("/") a = Announcement(router=currRouter, ip=ip, prefix=prefix, ASPath=" ".join(route["path"]), nextHop=route["nexthop"], - originAS=originAS) + crawlAS=crawlAS, originAS=originAS) a.save() else: print(" !! No routes found in host output")