Compare commits


11 Commits

  1. backend/cmk_parser.py (8 changes)
  2. backend/crawler.py (22 changes)
  3. bgpdata/templates/bgpdata/new_new_map.html (30 changes)
  4. bin/conv.py (37 changes)
  5. requirements.txt (1 change)

backend/cmk_parser.py (8 changes)

@@ -45,7 +45,7 @@ def parseBGPData(raw, asno):
     # mkify
     raw = d.group(2).split("\n")
-    arr = filter(lambda _z: _z, map(lambda _y: filter(lambda _x: _x, re.split(r"\s+", _y)), raw))
+    arr = list(filter(lambda _z: _z, map(lambda _y: list(filter(lambda _x: _x, re.split(r"\s+", _y))), raw)))
     # parse for bird/quagga
     result = None
@@ -80,7 +80,7 @@ def parseBird(data, raw, asno):
     status = _birdFindTable(data, "show status")
     if status[2][0] != "1011-Router":
         err("Couldn't find router id in bird output")
-    peers = filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data))
+    peers = list(filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data)))
     if asno == None:
         err("Host is bird")
@@ -135,7 +135,7 @@ def _birdMakeProtocols(info):
     # state (established, active)
    # if error, last error is available
     protocols = []
-    for proto, data in _birdFindProtocols(info).iteritems():
+    for proto, data in _birdFindProtocols(info).items():
        protoInfo = {
            "name": proto,
            "type": data[0][1],
@@ -343,7 +343,7 @@ def _quaggaFindRoutes(raw):
         d["network"] = lastIP
         # "parse" path (everything after 61 chars, but no i)
-        path = filter(lambda _x: _x not in ('', 'i'), line[61:].split(" "))
+        path = list(filter(lambda _x: _x not in ('', 'i'), line[61:].split(" ")))
         # currently skip incomplete routes
         if '?' not in path:
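These four hunks are the core of the Python 3 port: filter() and map() now return single-use iterators rather than lists, and dict.iteritems() is gone in favor of dict.items(). Wrapping the results in list() restores list semantics wherever the parser indexes, re-iterates, or membership-tests them. A minimal standalone sketch of the pitfall (illustrative, not code from this repo):

# Python 3: filter()/map() return single-use iterators, not lists
words = filter(None, ["a", "", "b"])
assert list(words) == ["a", "b"]
assert list(words) == []          # exhausted: a second pass yields nothing

# list(...) materializes the result so it can be reused or indexed
words = list(filter(None, ["a", "", "b"]))
assert words[0] == "a" and "b" in words   # safe to index and re-test

# dict.iteritems() was removed; dict.items() is the Python 3 spelling
for key, value in {"proto": "BGP"}.items():
    print(key, value)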

backend/crawler.py (22 changes)

@@ -98,10 +98,6 @@ def get_current_network():
             log.exception("Could not get data from host %s method %s", host, host.checkMethod)
             continue
-    # add neighbor count
-    for node, data in net.nodes(data=True):
-        data["neighbors"] = len(list(net.neighbors(node)))
     log.info("Adding last seen neighbor info")
     for asls in ASLastSeen.objects.all():
         if asls.asn not in net.nodes:
@@ -113,6 +109,10 @@ def get_current_network():
                 _populate_node(net, neigh.asn)
             net.nodes[asls.asn]['online'] = False
+    # add neighbor count
+    for node, data in net.nodes(data=True):
+        data["neighbors"] = len(list(net.neighbors(node)))
     log.info("Crawl done in %.2fs", time.time() - crawl_start)
     # add id to edges
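These two hunks move the neighbor-count loop from before the "last seen" pass to after it; nodes added by _populate_node() during that pass would otherwise never receive a neighbors attribute. A standalone networkx sketch of why the ordering matters (the graph contents are made up):

import networkx as nx

net = nx.Graph()
net.add_edge(64496, 64499)
net.add_node(64500)   # stands in for an offline "last seen" AS added late

# Counting only once the graph is complete guarantees every node,
# including late additions, gets a "neighbors" attribute.
for node, data in net.nodes(data=True):
    data["neighbors"] = len(list(net.neighbors(node)))

assert net.nodes[64500]["neighbors"] == 0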
@@ -164,7 +164,7 @@ def _add_data_to_net(net, data):
         if not as_path:
             continue
-        orig_node = _populate_node(net, as_path[0])
+        orig_node = _populate_node(net, as_path[-1])
         orig_node['prefixes'].add(route['prefix'])
         for n in range(len(as_path) - 1):
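The as_path[0] to as_path[-1] change matches how BGP encodes AS_PATH: the list runs from the AS that announced the route down to the originating AS, so the prefix owner is the last element, not the first. A sketch with an illustrative, made-up path:

# BGP AS_PATH ordering: nearest AS first, origin AS last.
# ASNs below are from the private-use range, purely illustrative.
as_path = [64512, 64496, 64499]

first_hop = as_path[0]    # neighbor that announced the route to the crawler
origin = as_path[-1]      # AS that actually originates the prefix

assert (first_hop, origin) == (64512, 64499)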
@@ -196,4 +196,14 @@ def convert_crawl(crawl):
             prefix = "{}/{}".format(ann.ip, ann.prefix)
             path = list(map(int, ann.ASPath.split()))
             net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
-            net.nodes[path[-1]]['prefixes'].add(prefix)
+            if path:
+                net.nodes[path[-1]]['prefixes'].add(prefix)
+            for path_asn in path:
+                if path_asn in net.nodes and path_asn not in (path[-1], path[0]):
+                    net.nodes[path_asn]['stub'] = False
+    # add neighbor count
+    for node, data in net.nodes(data=True):
+        data["neighbors"] = len(list(net.neighbors(node)))
     return net
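convert_crawl() picks up the same late neighbor count plus two robustness tweaks: path[-1] is only dereferenced when the AS path is non-empty, and any AS sitting strictly inside a path is marked non-stub, since it evidently transits traffic. A standalone sketch of that guard and stub logic (routes are made up):

# Illustrative routes; the empty path used to raise IndexError on path[-1].
routes = [("192.0.2.0/24", [64512, 64496, 64499]), ("198.51.100.0/24", [])]
prefixes = {}
stub = {64512: True, 64496: True, 64499: True}

for prefix, path in routes:
    if path:                                  # guard against empty AS paths
        prefixes.setdefault(path[-1], set()).add(prefix)
    for asn in path:
        if asn not in (path[0], path[-1]):    # strictly inside the path
            stub[asn] = False                 # it forwards for other ASes

assert stub[64496] is False and stub[64499] is True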

bgpdata/templates/bgpdata/new_new_map.html (30 changes)

@@ -109,8 +109,9 @@ drag = simulation => {
         .on("end", dragended);
 }
-d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
+let path_tokens = window.location.pathname.split("/")
+let elem = path_tokens[path_tokens.length - 2]
+d3.json("/api/v2/crawlrun/" + elem + "/?with_graph").then((data) => {
     // Links data join
     data = data.graph
@@ -151,9 +152,17 @@ d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
     const link = svg
         .selectAll(".link")
         .data(data.links)
-        .join((enter) =>
+        .join(enter =>
             enter.append("line")
-                .attr("class", "link"));
+                .attr("class", "link"))
+        .style("stroke-width", l => {
+            let width = l.source.neighbors + l.target.neighbors + 2;
+            if(isNaN(width))
+                return 3
+            else
+                return width
+        })
     // Nodes data join
// Nodes data join
@@ -171,13 +180,22 @@ d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
     .attr("rx", d => d.stub ? 34 : (40 + (d.neighbors > 5 ? 5 : 0)))
     .attr("ry", d => d.stub ? 12 : (20 + (d.neighbors > 5 ? 2 : 0)))
     .attr("fill", function(d) {
+        //if(d.directly_crawled)
+        //    return "#94FF70";
+        //else if(!d.online)
+        //    return "#FFCCCC";
+        //else if(d.stub)
+        //    return "#3291A8"
+        //else
+        //    return "#D1FFC2";
         if(d.directly_crawled)
+            //return "#55bc32";
             return "#94FF70";
         else if(!d.online)
             return "#FFCCCC";
+            // return "#F0FFEB";
         else if(d.stub)
-            return "#3291A8"
+            //return "#94FF70";
+            return "#E1FFE2";
         else
             return "#D1FFC2";
     })

bin/conv.py (37 changes)

@@ -7,21 +7,46 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
 import django
 django.setup()
-from backend.crawler import convert_crawl
+from backend.crawler import convert_crawl, net_to_json
 from bgpdata.models import CrawlRun
+def _convert_crawl(crawl):
+    net = convert_crawl(crawl)
+    if net.nodes and net.edges:
+        crawl.graph = net_to_json(net)
+        crawl.save()
+        print("Crawl {} updated".format(crawl.id))
+    else:
+        print("Crawl {} had no nodes or edges, abort".format(crawl.id))
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("-c", "--crawl-id", type=int)
+    parser.add_argument("-a", "--all", default=False, action="store_true")
+    parser.add_argument("-e", "--empty-graph-only", default=False, action="store_true")
     args = parser.parse_args()
-    try:
-        crawl = CrawlRun.objects.get(pk=args.crawl_id)
-    except CrawlRun.DoesNotExist:
-        parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+    if args.crawl_id and args.all:
+        parser.error("-c and -a don't work together")
+    if args.crawl_id:
+        try:
+            crawl = CrawlRun.objects.get(pk=args.crawl_id)
+        except CrawlRun.DoesNotExist:
+            parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+        _convert_crawl(crawl)
+    elif args.all:
+        if args.empty_graph_only:
+            crawls = CrawlRun.objects.filter(graph='')
+        else:
+            crawls = CrawlRun.objects.all()
-    convert_crawl(crawl)
+        for crawl in crawls:
+            _convert_crawl(crawl)
+    else:
+        parser.error("Either specify a crawl with -c or use -a for all")
 if __name__ == '__main__':
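The hand-rolled "-c and -a don't work together" and "Either specify a crawl..." checks could also be expressed directly in argparse with a mutually exclusive group. A hypothetical alternative sketch, not what the diff does; flag names are mirrored from it:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)  # rejects -c together with -a, and neither
group.add_argument("-c", "--crawl-id", type=int)
group.add_argument("-a", "--all", action="store_true")
parser.add_argument("-e", "--empty-graph-only", action="store_true")

args = parser.parse_args(["-a", "-e"])
assert args.all and args.empty_graph_only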

requirements.txt (1 change)

@@ -4,3 +4,4 @@ django-rest-framework
 django-filter
 networkx
 grpcio
+protobuf
