Compare commits

..

4 Commits

Author           SHA1        Message                                        Date
Sebastian Lohff  ec7cbab410  Add protobuf as dependency                     2020-06-15 03:18:56 +02:00
Sebastian Lohff  180e127fff  New (temporary) colorscheme for new map        2020-06-15 03:18:32 +02:00
Sebastian Lohff  841e1d015a  Protect crawl converter against broken crawls  2020-06-15 03:14:09 +02:00
                             Some crawls don't contain all ASNs. As we only want to
                             convert the crawl and not add additional information,
                             we just skip adding stub info for non-existent ASNs.
Sebastian Lohff  b506c7c8a2  Python3 fixes for old crawler                  2020-06-15 03:12:14 +02:00
4 changed files with 17 additions and 7 deletions

File 1 of 4

@@ -45,7 +45,7 @@ def parseBGPData(raw, asno):
     # mkify
     raw = d.group(2).split("\n")
-    arr = filter(lambda _z: _z, map(lambda _y: filter(lambda _x: _x, re.split(r"\s+", _y)), raw))
+    arr = list(filter(lambda _z: _z, map(lambda _y: list(filter(lambda _x: _x, re.split(r"\s+", _y))), raw)))
 
     # parse for bird/quagga
     result = None
@@ -80,7 +80,7 @@ def parseBird(data, raw, asno):
     status = _birdFindTable(data, "show status")
     if status[2][0] != "1011-Router":
         err("Couldn't find router id in bird output")
-    peers = filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data))
+    peers = list(filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data)))
 
     if asno == None:
         err("Host is bird")
@@ -135,7 +135,7 @@ def _birdMakeProtocols(info):
     # state (established, active)
     # if error, last error is avilable
     protocols = []
-    for proto, data in _birdFindProtocols(info).iteritems():
+    for proto, data in _birdFindProtocols(info).items():
         protoInfo = {
             "name": proto,
             "type": data[0][1],
@@ -343,7 +343,7 @@ def _quaggaFindRoutes(raw):
                 d["network"] = lastIP
 
             # "parse" path (everything after 61 chars, but no i)
-            path = filter(lambda _x: _x not in ('', 'i'), line[61:].split(" "))
+            path = list(filter(lambda _x: _x not in ('', 'i'), line[61:].split(" ")))
 
             # currently skip incomplete routes
             if '?' not in path:
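
All four Python3 fixes in this file follow the same pattern: in Python 3, filter() and map() return lazy iterators instead of lists, and dict.iteritems() was removed in favour of items(). A minimal standalone sketch of both pitfalls (example data only, not code from this repository):

# Python 2: filter() returned a list; Python 3: it returns a one-shot iterator,
# so anything that indexes or re-iterates the result needs an explicit list().
raw = ["65001 65002", "", "65003"]

lazy = filter(None, raw)            # <filter object> in Python 3
# lazy[0]                           # would raise: 'filter' object is not subscriptable
tokens = list(filter(None, raw))    # ['65001 65002', '65003']
print(tokens[0])

# dict.iteritems() no longer exists in Python 3; items() is the replacement.
protocols = {"bgp1": "BGP", "static1": "Static"}
for name, proto_type in protocols.items():
    print(name, proto_type)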

File 2 of 4

@@ -199,7 +199,7 @@ def convert_crawl(crawl):
         if path:
             net.nodes[path[-1]]['prefixes'].add(prefix)
             for path_asn in path:
-                if path_asn not in (path[-1], path[0]):
+                if path_asn in net.nodes and path_asn not in (path[-1], path[0]):
                     net.nodes[path_asn]['stub'] = False
 
     # add neighbor count
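
This is the change described by the commit message for 841e1d015a: ASNs that appear in an AS path but are missing from the crawl are now skipped instead of being looked up in the graph, where they have no node entry. A rough standalone sketch of the guarded loop, using a plain dict in place of the repository's networkx graph (names and data are illustrative only):

# Hypothetical, simplified version of the guarded stub-marking loop above;
# `nodes` stands in for net.nodes, `paths` for the AS paths seen in the crawl.
nodes = {65001: {"stub": True}, 65002: {"stub": True}, 65003: {"stub": True}}
paths = [
    [65001, 65002, 65003],  # every ASN is present in the crawl
    [65001, 65404, 65003],  # 65404 is missing from this (broken) crawl
]

for path in paths:
    for path_asn in path:
        # Skip first and last hop, and skip ASNs the crawl doesn't know about.
        if path_asn in nodes and path_asn not in (path[-1], path[0]):
            nodes[path_asn]["stub"] = False

print(nodes)  # only 65002 was demoted from stub; 65404 was silently skipped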

File 3 of 4

@@ -180,13 +180,22 @@ d3.json("/api/v2/crawlrun/" + elem + "/?with_graph").then((data) => {
         .attr("rx", d => d.stub ? 34 : (40 + (d.neighbors > 5 ? 5 : 0)))
         .attr("ry", d => d.stub ? 12 : (20 + (d.neighbors > 5 ? 2 : 0)))
         .attr("fill", function(d) {
+            //if(d.directly_crawled)
+            //    return "#94FF70";
+            //else if(!d.online)
+            //    return "#FFCCCC";
+            //else if(d.stub)
+            //    return "#3291A8"
+            //else
+            //    return "#D1FFC2";
             if(d.directly_crawled)
-                //return "#55bc32";
                 return "#94FF70";
             else if(!d.online)
                 return "#FFCCCC";
+                // return "#F0FFEB";
             else if(d.stub)
-                return "#3291A8"
+                //return "#94FF70";
+                return "#E1FFE2";
             else
                 return "#D1FFC2";
         })
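
For reference, the only fill color that actually changes in this temporary scheme is the one for stub nodes; after this commit the mapping is (descriptive keys, not identifiers from the code):

# Node fill colors after this commit (the old colorscheme survives only as comments).
node_fill = {
    "directly_crawled": "#94FF70",
    "offline":          "#FFCCCC",
    "stub":             "#E1FFE2",  # previously "#3291A8"
    "other":            "#D1FFC2",
}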

File 4 of 4

@@ -4,3 +4,4 @@ django-rest-framework
 django-filter
 networkx
 grpcio
+protobuf
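
protobuf is pinned here presumably because the protoc-generated gRPC modules import it at runtime rather than relying on it being pulled in transitively; a quick sanity check after installing the requirements (this uses the standard import path of the protobuf package, not project code):

# Confirm the newly pinned dependency is importable.
import google.protobuf
print(google.protobuf.__version__)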