Compare commits


11 Commits

@@ -45,7 +45,7 @@ def parseBGPData(raw, asno):
     # mkify
     raw = d.group(2).split("\n")
-    arr = filter(lambda _z: _z, map(lambda _y: filter(lambda _x: _x, re.split(r"\s+", _y)), raw))
+    arr = list(filter(lambda _z: _z, map(lambda _y: list(filter(lambda _x: _x, re.split(r"\s+", _y))), raw)))
 
     # parse for bird/quagga
     result = None
@@ -80,7 +80,7 @@ def parseBird(data, raw, asno):
     status = _birdFindTable(data, "show status")
     if status[2][0] != "1011-Router":
         err("Couldn't find router id in bird output")
 
-    peers = filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data))
+    peers = list(filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data)))
     if asno == None:
         err("Host is bird")
@@ -135,7 +135,7 @@ def _birdMakeProtocols(info):
     # state (established, active)
     # if error, last error is avilable
     protocols = []
-    for proto, data in _birdFindProtocols(info).iteritems():
+    for proto, data in _birdFindProtocols(info).items():
         protoInfo = {
             "name": proto,
             "type": data[0][1],
@@ -343,7 +343,7 @@ def _quaggaFindRoutes(raw):
         d["network"] = lastIP
 
         # "parse" path (everything after 61 chars, but no i)
-        path = filter(lambda _x: _x not in ('', 'i'), line[61:].split(" "))
+        path = list(filter(lambda _x: _x not in ('', 'i'), line[61:].split(" ")))
 
         # currently skip incomplete routes
         if '?' not in path:
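
The list(...) wrappers in the hunks above (together with the iteritems() -> items() change) are the heart of the Python 3 port: filter(), map(), and dict.items() now return lazy iterators or views instead of lists, so code that indexes the result or iterates it twice breaks silently. A standalone sketch of the pitfall, not code from this repo:

    # Python 2: filter() returned a list; Python 3: a one-shot iterator
    arr = filter(None, ["a", "", "b"])
    print(list(arr))   # ['a', 'b']
    print(list(arr))   # [] -- the iterator is already exhausted
    # wrapping in list() restores the eager Python 2 behaviour
    arr = list(filter(None, ["a", "", "b"]))
    print(arr[0])      # 'a'; indexing and repeated passes work again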

@ -98,10 +98,6 @@ def get_current_network():
log.exception("Could not get data from host %s method %s", host, host.checkMethod) log.exception("Could not get data from host %s method %s", host, host.checkMethod)
continue continue
# add neighbor count
for node, data in net.nodes(data=True):
data["neighbors"] = len(list(net.neighbors(node)))
log.info("Adding last seen neighbor info") log.info("Adding last seen neighbor info")
for asls in ASLastSeen.objects.all(): for asls in ASLastSeen.objects.all():
if asls.asn not in net.nodes: if asls.asn not in net.nodes:
@@ -113,6 +109,10 @@ def get_current_network():
                     _populate_node(net, neigh.asn)
                 net.nodes[asls.asn]['online'] = False
 
+    # add neighbor count
+    for node, data in net.nodes(data=True):
+        data["neighbors"] = len(list(net.neighbors(node)))
+
     log.info("Crawl done in %.2fs", time.time() - crawl_start)
 
     # add id to edges
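
Note the neighbor-count pass moved below the ASLastSeen loop: nodes added for offline ASes would otherwise never receive a "neighbors" attribute. For reference, the same pass on a toy networkx graph (hypothetical ASNs, not crawler data):

    import networkx as nx

    net = nx.Graph()
    net.add_edges_from([(64512, 64513), (64512, 64514)])
    for node, data in net.nodes(data=True):
        data["neighbors"] = len(list(net.neighbors(node)))
    print(net.nodes[64512]["neighbors"])  # 2; the two leaf nodes get 1 each
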
@@ -164,7 +164,7 @@ def _add_data_to_net(net, data):
         if not as_path:
             continue
 
-        orig_node = _populate_node(net, as_path[0])
+        orig_node = _populate_node(net, as_path[-1])
         orig_node['prefixes'].add(route['prefix'])
 
         for n in range(len(as_path) - 1):
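
A BGP AS_PATH runs from the announcing peer down to the originator, so the origin of a prefix is the path's last element, not its first: given a received path [64512, 64620, 65001] (hypothetical ASNs), AS 65001 originated the prefix and AS 64512 only passed it on. Hence as_path[-1] above.
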
@@ -196,4 +196,14 @@ def convert_crawl(crawl):
             prefix = "{}/{}".format(ann.ip, ann.prefix)
             path = list(map(int, ann.ASPath.split()))
             net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
-            net.nodes[path[-1]]['prefixes'].add(prefix)
+            if path:
+                net.nodes[path[-1]]['prefixes'].add(prefix)
+                for path_asn in path:
+                    if path_asn in net.nodes and path_asn not in (path[-1], path[0]):
+                        net.nodes[path_asn]['stub'] = False
+
+    # add neighbor count
+    for node, data in net.nodes(data=True):
+        data["neighbors"] = len(list(net.neighbors(node)))
+
+    return net
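
The interior-of-path check implements the usual stub heuristic: an AS appearing between the first and last hop of some AS_PATH carries traffic for others and is therefore not a stub network. A minimal sketch with a hypothetical path:

    path = [64512, 64620, 65001]
    transit = [a for a in path if a not in (path[0], path[-1])]
    print(transit)  # [64620] -- only this AS would get stub = False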

@@ -109,8 +109,9 @@ drag = simulation => {
     .on("end", dragended);
 }
 
-d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
+let path_tokens = window.location.pathname.split("/")
+let elem = path_tokens[path_tokens.length - 2]
+d3.json("/api/v2/crawlrun/" + elem + "/?with_graph").then((data) => {
+
   // Links data join
   data = data.graph
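
Rather than always fetching the hard-coded live crawl, the page now derives the crawl id from its own URL: for a pathname such as "/crawlrun/17/" (hypothetical route), split("/") yields ["", "crawlrun", "17", ""], so the id sits at index length - 2.
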
@@ -151,9 +152,17 @@ d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
   const link = svg
     .selectAll(".link")
     .data(data.links)
-    .join((enter) =>
+    .join(enter =>
       enter.append("line")
-        .attr("class", "link"));
+        .attr("class", "link"))
+    .style("stroke-width", l => {
+      let width = l.source.neighbors + l.target.neighbors + 2;
+      if(isNaN(width))
+        return 3
+      else
+        return width
+    })
 
   // Nodes data join
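
Link thickness now scales with how well-connected both endpoints are. The isNaN fallback keeps a link visible when an endpoint is missing its neighbors count (plausible for graphs serialized before the neighbor-count pass existed), where the sum would otherwise be NaN.
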
@@ -171,13 +180,22 @@ d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
     .attr("rx", d => d.stub ? 34 : (40 + (d.neighbors > 5 ? 5 : 0)))
     .attr("ry", d => d.stub ? 12 : (20 + (d.neighbors > 5 ? 2 : 0)))
     .attr("fill", function(d) {
+      //if(d.directly_crawled)
+      //  return "#94FF70";
+      //else if(!d.online)
+      //  return "#FFCCCC";
+      //else if(d.stub)
+      //  return "#3291A8"
+      //else
+      //  return "#D1FFC2";
       if(d.directly_crawled)
+        //return "#55bc32";
         return "#94FF70";
       else if(!d.online)
         return "#FFCCCC";
+        // return "#F0FFEB";
       else if(d.stub)
-        return "#3291A8"
+        //return "#94FF70";
+        return "#E1FFE2";
       else
         return "#D1FFC2";
     })

@@ -7,21 +7,46 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
 import django
 django.setup()
 
-from backend.crawler import convert_crawl
+from backend.crawler import convert_crawl, net_to_json
 from bgpdata.models import CrawlRun
 
 
+def _convert_crawl(crawl):
+    net = convert_crawl(crawl)
+    if net.nodes and net.edges:
+        crawl.graph = net_to_json(net)
+        crawl.save()
+        print("Crawl {} updated".format(crawl.id))
+    else:
+        print("Crawl {} had no nodes or edges, abort".format(crawl.id))
+
+
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("-c", "--crawl-id", type=int)
+    parser.add_argument("-a", "--all", default=False, action="store_true")
+    parser.add_argument("-e", "--empty-graph-only", default=False, action="store_true")
     args = parser.parse_args()
 
-    try:
-        crawl = CrawlRun.objects.get(pk=args.crawl_id)
-    except CrawlRun.DoesNotExist:
-        parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+    if args.crawl_id and args.all:
+        parser.error("-c and -a don't work together")
 
-    convert_crawl(crawl)
+    if args.crawl_id:
+        try:
+            crawl = CrawlRun.objects.get(pk=args.crawl_id)
+        except CrawlRun.DoesNotExist:
+            parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+        _convert_crawl(crawl)
+    elif args.all:
+        if args.empty_graph_only:
+            crawls = CrawlRun.objects.filter(graph='')
+        else:
+            crawls = CrawlRun.objects.all()
+        for crawl in crawls:
+            _convert_crawl(crawl)
+    else:
+        parser.error("Either specify a crawl with -c or use -a for all")
 
 if __name__ == '__main__':
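
Usage after this change: -c <id> converts a single crawl as before, -a re-converts every crawl, and -a -e limits the run to crawls whose stored graph is still empty, so backfilling old CrawlRuns is cheap to repeat.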

@@ -4,3 +4,4 @@ django-rest-framework
 django-filter
 networkx
 grpcio
+protobuf
