Compare commits


No commits in common. "90cbce9550088a4099f1856330a75e678b4bf4fe" and "9784d4dc7cb42a4aa24bc05ca7482d9c2fce7191" have entirely different histories.

3 changed files with 16 additions and 60 deletions

View File

@@ -98,6 +98,10 @@ def get_current_network():
             log.exception("Could not get data from host %s method %s", host, host.checkMethod)
             continue

+    # add neighbor count
+    for node, data in net.nodes(data=True):
+        data["neighbors"] = len(list(net.neighbors(node)))
+
     log.info("Adding last seen neighbor info")
     for asls in ASLastSeen.objects.all():
         if asls.asn not in net.nodes:
@@ -109,10 +113,6 @@ def get_current_network():
                     _populate_node(net, neigh.asn)
                 net.nodes[asls.asn]['online'] = False

-    # add neighbor count
-    for node, data in net.nodes(data=True):
-        data["neighbors"] = len(list(net.neighbors(node)))
-
     log.info("Crawl done in %.2fs", time.time() - crawl_start)

     # add id to edges
@@ -164,7 +164,7 @@ def _add_data_to_net(net, data):
         if not as_path:
             continue

-        orig_node = _populate_node(net, as_path[-1])
+        orig_node = _populate_node(net, as_path[0])
         orig_node['prefixes'].add(route['prefix'])

         for n in range(len(as_path) - 1):
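This one-line change swaps which end of the AS path is treated as the origin of the announced prefix. In the BGP wire format the origin AS is the last hop of AS_PATH, so the correct index depends on the order the crawler stores the parsed path in; a sketch with a hypothetical path:

as_path = [64512, 64496, 64497]  # hypothetical, collector-side hop first
origin_wire_order = as_path[-1]  # 64497: the origin AS if wire order is kept
first_hop = as_path[0]           # 64512: the AS closest to the collector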
@@ -196,14 +196,4 @@ def convert_crawl(crawl):
                 prefix = "{}/{}".format(ann.ip, ann.prefix)
                 path = list(map(int, ann.ASPath.split()))
                 net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
-                if path:
-                    net.nodes[path[-1]]['prefixes'].add(prefix)
-                    for path_asn in path:
-                        if path_asn not in (path[-1], path[0]):
-                            net.nodes[path_asn]['stub'] = False
-
-    # add neighbor count
-    for node, data in net.nodes(data=True):
-        data["neighbors"] = len(list(net.neighbors(node)))
-
-    return net
+                net.nodes[path[-1]]['prefixes'].add(prefix)
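For reference, the deleted block also encoded a simple transit test: an AS that appears strictly inside a path forwards traffic for other networks, so it was marked as not a stub. Restated standalone, with hypothetical ASNs:

path = [64512, 64496, 64497]
for path_asn in path:
    if path_asn not in (path[-1], path[0]):
        # only the middle hop(s) qualify: 64496 here
        print(path_asn, "is transit, not a stub")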

View File

@@ -109,9 +109,8 @@ drag = simulation => {
         .on("end", dragended);
 }

-let path_tokens = window.location.pathname.split("/")
-let elem = path_tokens[path_tokens.length - 2]
-d3.json("/api/v2/crawlrun/" + elem + "/?with_graph").then((data) => {
+d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
     // Links data join
     data = data.graph
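The frontend no longer parses a crawl id out of the URL path and always requests the special live crawl instead. A sketch of fetching the same payload from Python, assuming a local dev server (host and port are assumptions; the URL and the graph/links field names come from the diff, the "nodes" key is inferred):

import requests

resp = requests.get("http://localhost:8000/api/v2/crawlrun/live/?with_graph")
graph = resp.json()["graph"]  # node-link structure the d3 code walks
print(len(graph["nodes"]), len(graph["links"]))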
@@ -152,17 +151,9 @@ d3.json("/api/v2/crawlrun/" + elem + "/?with_graph").then((data) => {
     const link = svg
         .selectAll(".link")
         .data(data.links)
-        .join(enter =>
+        .join((enter) =>
             enter.append("line")
-            .attr("class", "link"))
-        .style("stroke-width", l => {
-            let width = l.source.neighbors + l.target.neighbors + 2;
-            if(isNaN(width))
-                return 3
-            else
-                return width
-        })
+            .attr("class", "link"));

     // Nodes data join
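The removed .style() call scaled each edge's stroke width with the endpoints' neighbor counts and fell back to 3 when a count was missing (the isNaN case). The same heuristic restated in Python, for reference only:

def stroke_width(src_neighbors, dst_neighbors):
    # width grows with the combined degree of both endpoints
    try:
        return src_neighbors + dst_neighbors + 2
    except TypeError:  # a missing count, the NaN case in the JS version
        return 3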

View File

@@ -7,46 +7,21 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
 import django
 django.setup()

-from backend.crawler import convert_crawl, net_to_json
+from backend.crawler import convert_crawl
 from bgpdata.models import CrawlRun


-def _convert_crawl(crawl):
-    net = convert_crawl(crawl)
-    if net.nodes and net.edges:
-        crawl.graph = net_to_json(net)
-        crawl.save()
-        print("Crawl {} updated".format(crawl.id))
-    else:
-        print("Crawl {} had no nodes or edges, abort".format(crawl.id))
-
-
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("-c", "--crawl-id", type=int)
-    parser.add_argument("-a", "--all", default=False, action="store_true")
-    parser.add_argument("-e", "--empty-graph-only", default=False, action="store_true")
     args = parser.parse_args()

-    if args.crawl_id and args.all:
-        parser.error("-c and -a don't work together")
+    try:
+        crawl = CrawlRun.objects.get(pk=args.crawl_id)
+    except CrawlRun.DoesNotExist:
+        parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))

-    if args.crawl_id:
-        try:
-            crawl = CrawlRun.objects.get(pk=args.crawl_id)
-        except CrawlRun.DoesNotExist:
-            parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
-        _convert_crawl(crawl)
-    elif args.all:
-        if args.empty_graph_only:
-            crawls = CrawlRun.objects.filter(graph='')
-        else:
-            crawls = CrawlRun.objects.all()
-        for crawl in crawls:
-            _convert_crawl(crawl)
-    else:
-        parser.error("Either specify a crawl with -c or use -a for all")
+    convert_crawl(crawl)


 if __name__ == '__main__':
     main()
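One behavioral note on the simplified main(): the -c flag is no longer guarded, so omitting it passes pk=None into the lookup; Django should resolve that to a query matching nothing, and the DoesNotExist branch still calls parser.error, just with "id None" in the message. Usage sketch, with a hypothetical script name (the real filename is not shown in this view):

#   python convert_crawl.py -c 42    -> converts CrawlRun 42
#   python convert_crawl.py          -> "CrawlRun with id None does not exist"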