Compare commits


11 Commits

Author SHA1 Message Date
Sebastian Lohff ec7cbab410 Add protobuf as dependency 2020-06-15 03:18:56 +02:00
Sebastian Lohff 180e127fff New (temporary) colorscheme for new map 2020-06-15 03:18:32 +02:00
Sebastian Lohff 841e1d015a Protect crawl converter against broken crawls
Some crawls don't contain all ASNs. As we only want to convert the crawl
and not add additional information, we just skip adding stub info for
non-existent ASNs.
2020-06-15 03:14:09 +02:00
Sebastian Lohff b506c7c8a2 Python3 fixes for old crawler 2020-06-15 03:12:14 +02:00
Sebastian Lohff 90cbce9550 Improve conv tool to convert all old graphs 2020-06-14 03:36:42 +02:00
Sebastian Lohff 7208502bfb Save graph in crawl converter 2020-06-14 00:24:18 +02:00
Sebastian Lohff 53c70e8042 Get graph from api for new map 2020-06-14 00:23:59 +02:00
Sebastian Lohff ec3ed26fa6 Broken line width calculation (always returns 3) 2020-06-13 23:52:50 +02:00
Sebastian Lohff ba59fd48d1 Stub detection + no path protection for crawl converter 2020-06-13 19:52:22 +02:00
Sebastian Lohff e3c83bc83e Prefix belongs to last node in as path, not first 2020-06-13 19:51:12 +02:00
Sebastian Lohff c81e632a72 Properly maintain neighbor count for everyone 2020-06-13 19:50:52 +02:00
5 changed files with 76 additions and 22 deletions

View File

@@ -45,7 +45,7 @@ def parseBGPData(raw, asno):
# mkify
raw = d.group(2).split("\n")
arr = filter(lambda _z: _z, map(lambda _y: filter(lambda _x: _x, re.split(r"\s+", _y)), raw))
arr = list(filter(lambda _z: _z, map(lambda _y: list(filter(lambda _x: _x, re.split(r"\s+", _y))), raw)))
# parse for bird/quagga
result = None
@@ -80,7 +80,7 @@ def parseBird(data, raw, asno):
status = _birdFindTable(data, "show status")
if status[2][0] != "1011-Router":
err("Couldn't find router id in bird output")
peers = filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data))
peers = list(filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data)))
if asno == None:
err("Host is bird")
@@ -135,7 +135,7 @@ def _birdMakeProtocols(info):
# state (established, active)
# if error, last error is available
protocols = []
for proto, data in _birdFindProtocols(info).iteritems():
for proto, data in _birdFindProtocols(info).items():
protoInfo = {
"name": proto,
"type": data[0][1],
@@ -343,7 +343,7 @@ def _quaggaFindRoutes(raw):
d["network"] = lastIP
# "parse" path (everything after 61 chars, but no i)
path = filter(lambda _x: _x not in ('', 'i'), line[61:].split(" "))
path = list(filter(lambda _x: _x not in ('', 'i'), line[61:].split(" ")))
# currently skip incomplete routes
if '?' not in path:
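
For context on these Python 3 fixes: in Python 3, filter() and map() return lazy iterators instead of lists, and dict.iteritems() was removed in favour of dict.items(), which is why the crawler now wraps its pipelines in list(). A minimal standalone sketch of the difference (illustrative only, not code from the repository):

# Illustrative sketch, not part of the crawler.
raw = ["65001  65002", "", "65003"]

# Python 2's filter()/map() returned lists that could be indexed and
# re-iterated; Python 3 returns lazy iterators, so len() or a second
# pass fails unless the result is materialised with list().
rows = list(filter(None, (line.split() for line in raw)))
assert rows == [["65001", "65002"], ["65003"]]
assert len(rows) == 2  # len() on a bare filter object raises TypeError

# dict.iteritems() no longer exists in Python 3; items() replaces it.
protocols = {"peer1": "BGP", "peer2": "Static"}
bgp_only = {name: kind for name, kind in protocols.items() if kind == "BGP"}
assert bgp_only == {"peer1": "BGP"}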

View File

@@ -98,10 +98,6 @@ def get_current_network():
log.exception("Could not get data from host %s method %s", host, host.checkMethod)
continue
# add neighbor count
for node, data in net.nodes(data=True):
data["neighbors"] = len(list(net.neighbors(node)))
log.info("Adding last seen neighbor info")
for asls in ASLastSeen.objects.all():
if asls.asn not in net.nodes:
@@ -113,6 +109,10 @@ def get_current_network():
_populate_node(net, neigh.asn)
net.nodes[asls.asn]['online'] = False
# add neighbor count
for node, data in net.nodes(data=True):
data["neighbors"] = len(list(net.neighbors(node)))
log.info("Crawl done in %.2fs", time.time() - crawl_start)
# add id to edges
@@ -164,7 +164,7 @@ def _add_data_to_net(net, data):
if not as_path:
continue
orig_node = _populate_node(net, as_path[0])
orig_node = _populate_node(net, as_path[-1])
orig_node['prefixes'].add(route['prefix'])
for n in range(len(as_path) - 1):
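
The as_path[0] to as_path[-1] change matches BGP AS_PATH semantics: the leftmost ASN is the neighbor the route was learned from and the rightmost ASN is the AS that originated the prefix, so an announced prefix belongs to the last element of the path. A small illustrative sketch (variable names are made up, not from the codebase):

# Illustrative sketch of attributing a prefix to the originating AS.
route = {"prefix": "10.42.0.0/16", "as_path": [65010, 65020, 65030]}

as_path = route["as_path"]
origin_asn = as_path[-1]   # rightmost ASN originated the prefix
first_hop = as_path[0]     # leftmost ASN is only the neighbor it was heard from

prefixes = {asn: set() for asn in as_path}
prefixes[origin_asn].add(route["prefix"])

assert prefixes[65030] == {"10.42.0.0/16"}
assert prefixes[65010] == set()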
@@ -196,4 +196,14 @@ def convert_crawl(crawl):
prefix = "{}/{}".format(ann.ip, ann.prefix)
path = list(map(int, ann.ASPath.split()))
net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
net.nodes[path[-1]]['prefixes'].add(prefix)
if path:
net.nodes[path[-1]]['prefixes'].add(prefix)
for path_asn in path:
if path_asn in net.nodes and path_asn not in (path[-1], path[0]):
net.nodes[path_asn]['stub'] = False
# add neighbor count
for node, data in net.nodes(data=True):
data["neighbors"] = len(list(net.neighbors(node)))
return net
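
The added convert_crawl logic applies the usual stub heuristic: an ASN that appears in the middle of any AS path carries traffic for others and is therefore not a stub, while ASNs seen only at the path ends may keep their stub flag; neighbor counts are recomputed only once every node and edge exists. A self-contained sketch of that heuristic with networkx (node and path values are illustrative):

import networkx as nx

# Illustrative: mark transit ASNs as non-stubs, then count neighbors last.
paths = [
    (65001, 65002, 65003),   # 65002 transits, so it is not a stub
    (65001, 65004),
]

net = nx.Graph()
for path in paths:
    for asn in path:
        if asn not in net.nodes:
            net.add_node(asn, stub=True)
    for a, b in zip(path, path[1:]):
        net.add_edge(a, b)
    for asn in path[1:-1]:   # intermediate hops only
        net.nodes[asn]["stub"] = False

# Neighbor counts only make sense once all nodes and edges are present.
for node, data in net.nodes(data=True):
    data["neighbors"] = len(list(net.neighbors(node)))

assert net.nodes[65002]["stub"] is False
assert net.nodes[65003]["stub"] is True
assert net.nodes[65001]["neighbors"] == 2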

View File

@@ -109,8 +109,9 @@ drag = simulation => {
.on("end", dragended);
}
d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
let path_tokens = window.location.pathname.split("/")
let elem = path_tokens[path_tokens.length - 2]
d3.json("/api/v2/crawlrun/" + elem + "/?with_graph").then((data) => {
// Links data join
data = data.graph
@@ -151,9 +152,17 @@ d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
const link = svg
.selectAll(".link")
.data(data.links)
.join((enter) =>
.join(enter =>
enter.append("line")
.attr("class", "link"));
.attr("class", "link"))
.style("stroke-width", l => {
let width = l.source.neighbors + l.target.neighbors + 2;
if(isNaN(width))
return 3
else
return width
})
// Nodes data join
@@ -171,13 +180,22 @@ d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
.attr("rx", d => d.stub ? 34 : (40 + (d.neighbors > 5 ? 5 : 0)))
.attr("ry", d => d.stub ? 12 : (20 + (d.neighbors > 5 ? 2 : 0)))
.attr("fill", function(d) {
//if(d.directly_crawled)
// return "#94FF70";
//else if(!d.online)
// return "#FFCCCC";
//else if(d.stub)
// return "#3291A8"
//else
// return "#D1FFC2";
if(d.directly_crawled)
//return "#55bc32";
return "#94FF70";
else if(!d.online)
return "#FFCCCC";
// return "#F0FFEB";
else if(d.stub)
return "#3291A8"
//return "#94FF70";
return "#E1FFE2";
else
return "#D1FFC2";
})

View File

@@ -7,21 +7,46 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
import django
django.setup()
from backend.crawler import convert_crawl
from backend.crawler import convert_crawl, net_to_json
from bgpdata.models import CrawlRun
def _convert_crawl(crawl):
net = convert_crawl(crawl)
if net.nodes and net.edges:
crawl.graph = net_to_json(net)
crawl.save()
print("Crawl {} updated".format(crawl.id))
else:
print("Crawl {} had no nodes or edges, abort".format(crawl.id))
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--crawl-id", type=int)
parser.add_argument("-a", "--all", default=False, action="store_true")
parser.add_argument("-e", "--empty-graph-only", default=False, action="store_true")
args = parser.parse_args()
try:
crawl = CrawlRun.objects.get(pk=args.crawl_id)
except CrawlRun.DoesNotExist:
parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
if args.crawl_id and args.all:
parser.error("-c and -a don't work together")
convert_crawl(crawl)
if args.crawl_id:
try:
crawl = CrawlRun.objects.get(pk=args.crawl_id)
except CrawlRun.DoesNotExist:
parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
_convert_crawl(crawl)
elif args.all:
if args.empty_graph_only:
crawls = CrawlRun.objects.filter(graph='')
else:
crawls = CrawlRun.objects.all()
for crawl in crawls:
_convert_crawl(crawl)
else:
parser.error("Either specify a crawl with -c or use -a for all")
if __name__ == '__main__':
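
As an aside, the explicit "-c and -a don't work together" check could also be expressed with argparse's mutually exclusive groups; a minimal sketch assuming the flags keep their current meaning (an alternative, not what this commit does):

import argparse

# Sketch: -c and -a cannot be combined, and exactly one of them is required.
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-c", "--crawl-id", type=int)
group.add_argument("-a", "--all", action="store_true")
parser.add_argument("-e", "--empty-graph-only", action="store_true")

args = parser.parse_args(["-a", "-e"])
assert args.all and args.empty_graph_only and args.crawl_id is None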

View File

@@ -4,3 +4,4 @@ django-rest-framework
django-filter
networkx
grpcio
protobuf