Compare commits
7 Commits
9784d4dc7c ... 90cbce9550
| Author | SHA1 | Date |
|---|---|---|
| Sebastian Lohff | 90cbce9550 | |
| Sebastian Lohff | 7208502bfb | |
| Sebastian Lohff | 53c70e8042 | |
| Sebastian Lohff | ec3ed26fa6 | |
| Sebastian Lohff | ba59fd48d1 | |
| Sebastian Lohff | e3c83bc83e | |
| Sebastian Lohff | c81e632a72 | |
```diff
@@ -98,10 +98,6 @@ def get_current_network():
             log.exception("Could not get data from host %s method %s", host, host.checkMethod)
             continue
 
-    # add neighbor count
-    for node, data in net.nodes(data=True):
-        data["neighbors"] = len(list(net.neighbors(node)))
-
     log.info("Adding last seen neighbor info")
     for asls in ASLastSeen.objects.all():
         if asls.asn not in net.nodes:
```
```diff
@@ -113,6 +109,10 @@ def get_current_network():
                 _populate_node(net, neigh.asn)
             net.nodes[asls.asn]['online'] = False
 
+    # add neighbor count
+    for node, data in net.nodes(data=True):
+        data["neighbors"] = len(list(net.neighbors(node)))
+
     log.info("Crawl done in %.2fs", time.time() - crawl_start)
 
     # add id to edges
```
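These two hunks move the neighbor-count pass so it runs after the last-seen ASes have been merged into the graph; counting before that point would leave the late-added nodes without a `neighbors` attribute and undercount their peers. A minimal sketch of the effect, assuming a networkx graph (which the `net.nodes(data=True)` and `net.neighbors()` calls suggest) and made-up ASNs:

```python
import networkx as nx

net = nx.Graph()
net.add_edge(64496, 64497)   # edge discovered during the crawl
net.add_edge(64497, 64511)   # 64511 merged in later from last-seen data

# Counting only after all nodes are present gives every node a value.
for node, data in net.nodes(data=True):
    data["neighbors"] = len(list(net.neighbors(node)))

assert net.nodes[64497]["neighbors"] == 2
assert net.nodes[64511]["neighbors"] == 1
```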
```diff
@@ -164,7 +164,7 @@ def _add_data_to_net(net, data):
         if not as_path:
             continue
 
-        orig_node = _populate_node(net, as_path[0])
+        orig_node = _populate_node(net, as_path[-1])
         orig_node['prefixes'].add(route['prefix'])
 
         for n in range(len(as_path) - 1):
```
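The one-line change above fixes the origin attribution: a BGP AS_PATH is ordered from the AS the route was learned from down to the originating AS, so the origin is the last element, not the first. A small illustration with made-up ASNs:

```python
# AS_PATH as received: nearest neighbor first, origin AS last.
as_path = [64496, 64500, 64511]
route = {'prefix': '192.0.2.0/24'}

prefixes = {asn: set() for asn in as_path}
prefixes[as_path[-1]].add(route['prefix'])   # attribute the prefix to the origin

assert prefixes[64511] == {'192.0.2.0/24'}
assert prefixes[64496] == set()
```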
```diff
@@ -196,4 +196,14 @@ def convert_crawl(crawl):
             prefix = "{}/{}".format(ann.ip, ann.prefix)
             path = list(map(int, ann.ASPath.split()))
             net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
-            net.nodes[path[-1]]['prefixes'].add(prefix)
+            if path:
+                net.nodes[path[-1]]['prefixes'].add(prefix)
+                for path_asn in path:
+                    if path_asn not in (path[-1], path[0]):
+                        net.nodes[path_asn]['stub'] = False
+
+    # add neighbor count
+    for node, data in net.nodes(data=True):
+        data["neighbors"] = len(list(net.neighbors(node)))
+
     return net
```
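Two things change here: the `if path:` guard avoids an IndexError on announcements with an empty AS path, and every AS that appears between the two ends of a path is flagged as transit (`stub = False`), since it carries routes for others. The classification logic in isolation, with made-up ASNs:

```python
paths = [
    [64496, 64500, 64511],   # 64500 sits in the middle -> transit, not a stub
    [],                      # empty path: the guard skips it entirely
]

stub = {asn: True for asn in (64496, 64500, 64511)}
for path in paths:
    if path:
        for path_asn in path:
            if path_asn not in (path[-1], path[0]):
                stub[path_asn] = False

assert stub == {64496: True, 64500: False, 64511: True}
```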
```diff
@@ -109,8 +109,9 @@ drag = simulation => {
         .on("end", dragended);
 }
 
 
-d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
+let path_tokens = window.location.pathname.split("/")
+let elem = path_tokens[path_tokens.length - 2]
+d3.json("/api/v2/crawlrun/" + elem + "/?with_graph").then((data) => {
     // Links data join
     data = data.graph
```
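The frontend no longer hardcodes the live crawl: it derives the crawl id from the page URL instead. The second-to-last path token is used because a trailing slash makes the last `split("/")` token empty. The same token logic sketched in Python (`/graph/23/` is a made-up example path):

```python
pathname = "/graph/23/"            # hypothetical page URL with trailing slash
path_tokens = pathname.split("/")  # ['', 'graph', '23', '']
elem = path_tokens[-2]             # second-to-last token -> '23'

assert "/api/v2/crawlrun/" + elem + "/?with_graph" == "/api/v2/crawlrun/23/?with_graph"
```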
```diff
@@ -151,9 +152,17 @@ d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
     const link = svg
         .selectAll(".link")
         .data(data.links)
-        .join((enter) =>
+        .join(enter =>
             enter.append("line")
-                .attr("class", "link"));
+                .attr("class", "link"))
+        .style("stroke-width", l => {
+            let width = l.source.neighbors + l.target.neighbors + 2;
+            if(isNaN(width))
+                return 3
+            else
+                return width
+
+        })
 
     // Nodes data join
```
bin/conv.py (37 changed lines)
```diff
@@ -7,21 +7,46 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
 import django
 django.setup()
 
-from backend.crawler import convert_crawl
+from backend.crawler import convert_crawl, net_to_json
 from bgpdata.models import CrawlRun
 
 
+def _convert_crawl(crawl):
+    net = convert_crawl(crawl)
+    if net.nodes and net.edges:
+        crawl.graph = net_to_json(net)
+        crawl.save()
+        print("Crawl {} updated".format(crawl.id))
+    else:
+        print("Crawl {} had no nodes or edges, abort".format(crawl.id))
+
+
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("-c", "--crawl-id", type=int)
+    parser.add_argument("-a", "--all", default=False, action="store_true")
+    parser.add_argument("-e", "--empty-graph-only", default=False, action="store_true")
     args = parser.parse_args()
 
-    try:
-        crawl = CrawlRun.objects.get(pk=args.crawl_id)
-    except CrawlRun.DoesNotExist:
-        parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+    if args.crawl_id and args.all:
+        parser.error("-c and -a don't work together")
 
-    convert_crawl(crawl)
+    if args.crawl_id:
+        try:
+            crawl = CrawlRun.objects.get(pk=args.crawl_id)
+        except CrawlRun.DoesNotExist:
+            parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+        _convert_crawl(crawl)
+    elif args.all:
+        if args.empty_graph_only:
+            crawls = CrawlRun.objects.filter(graph='')
+        else:
+            crawls = CrawlRun.objects.all()
+
+        for crawl in crawls:
+            _convert_crawl(crawl)
+    else:
+        parser.error("Either specify a crawl with -c or use -a for all")
 
 
 if __name__ == '__main__':
```
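`net_to_json` is imported alongside `convert_crawl`, but its implementation is not part of this diff. A minimal sketch of what such a helper could look like, assuming it wraps networkx's node-link serialization (the real function in `backend.crawler` may differ):

```python
import json
from networkx.readwrite import json_graph

def net_to_json(net):
    # Hypothetical implementation: node-link format preserves node attributes
    # like "neighbors" and "stub"; default=list converts the 'prefixes' and
    # 'routing_table' sets into JSON-serializable lists.
    return json.dumps(json_graph.node_link_data(net), default=list)
```

With the new flags, `./bin/conv.py -a -e` regenerates graphs only for crawls whose `graph` field is still empty, while `./bin/conv.py -c <id>` converts a single crawl; combining `-c` and `-a` is rejected.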