Save stats of run in CrawlRun() - much faster

Sebastian Lohff 2017-01-14 00:14:22 +01:00
parent 6b53bcf359
commit ead7e2f7d3
3 changed files with 56 additions and 4 deletions


@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import models, migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('bgpdata', '0013_auto_20150601_1513'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='crawlrun',
+            name='asCount',
+            field=models.IntegerField(default=0),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='crawlrun',
+            name='asOfflineCount',
+            field=models.IntegerField(default=0),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='crawlrun',
+            name='asOnlineCount',
+            field=models.IntegerField(default=0),
+            preserve_default=False,
+        ),
+        migrations.AddField(
+            model_name='crawlrun',
+            name='peeringCount',
+            field=models.IntegerField(default=0),
+            preserve_default=False,
+        ),
+    ]
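The migration adds the four counter columns with a one-off default of 0 (preserve_default=False), so CrawlRun rows from crawls that ran before this commit keep a count of 0 until they are recomputed. A minimal backfill sketch for those old rows, run e.g. from a Django shell; it assumes the models live in bgpdata.models (the app label used above) and uses the as_set reverse relation seen elsewhere in this commit:

from django.db.models import Q
from bgpdata.models import CrawlRun, Peering

for crawl in CrawlRun.objects.all():
    # recompute the cached statistics the same way the crawler now does
    crawl.asCount = crawl.as_set.count()
    crawl.asOnlineCount = crawl.as_set.filter(online=True).count()
    crawl.asOfflineCount = crawl.as_set.filter(online=False).count()
    crawl.peeringCount = Peering.objects.filter(
        Q(as1__crawl=crawl) | Q(as2__crawl=crawl)).count()
    crawl.save()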


@@ -25,17 +25,24 @@ class CrawlRun(models.Model):
     hostsCrawled = models.ManyToManyField(ConfigHost, null=True, blank=True)
+
+    asCount = models.IntegerField(default=0)
+    asOnlineCount = models.IntegerField(default=0)
+    asOfflineCount = models.IntegerField(default=0)
+    peeringCount = models.IntegerField(default=0)
 
     def __unicode__(self):
         return u"Run %d - %s to %s" % (self.pk, self.startTime, self.endTime if self.endTime else "?")
 
     def countAS(self):
-        return self.as_set.count()
+        return self.asCount
 
     def countASOnline(self):
-        return self.as_set.filter(online=True).count()
+        return self.asOnlineCount
 
     def countASOffline(self):
-        return self.as_set.filter(online=False).count()
+        return self.asOfflineCount
+        #return self.as_set.filter(online=False).count()
 
     def countPeerings(self):
-        return Peering.objects.filter(Q(as1__crawl=self)|Q(as2__crawl=self)).count()
+        return self.peeringCount
+        #return Peering.objects.filter(Q(as1__crawl=self)|Q(as2__crawl=self)).count()
 
 
 class CrawlLog(models.Model):
     INFO = 'INFO'
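For callers nothing changes: the count*() accessors keep their names and signatures, but each call now returns a cached integer instead of issuing one or two COUNT queries against the AS and Peering tables. Illustrative use (hypothetical view code, assuming the startTime field referenced in __unicode__ above):

crawl = CrawlRun.objects.latest('startTime')
# four plain attribute reads, no per-call COUNT queries
stats = (crawl.countAS(), crawl.countASOnline(),
         crawl.countASOffline(), crawl.countPeerings())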


@@ -199,6 +199,13 @@ def main():
     crawl.endTime = timezone.now()
     crawl.save()
 
+    # additional data
+    crawl.asCount = crawl.as_set.count()
+    crawl.asOnlineCount = crawl.as_set.filter(online=True).count()
+    crawl.asOfflineCount = crawl.as_set.filter(online=False).count()
+    crawl.peeringCount = Peering.objects.filter(Q(as1__crawl=crawl)|Q(as2__crawl=crawl)).count()
+    crawl.save()
+
     print(" !! Done")
     CrawlLog.log(crawl, "Crawl completed", severity=CrawlLog.INFO)
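The stats are written in a second save() right after the run is closed, so the whole row is written twice. A sketch of an alternative for that second write, using Django's update_fields argument (available since Django 1.5) to restrict it to the new columns; everything else stays as in the hunk above:

# replaces the second crawl.save() above; only the four stat columns
# are included in the UPDATE statement
crawl.save(update_fields=['asCount', 'asOnlineCount',
                          'asOfflineCount', 'peeringCount'])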