
Improve conv tool to convert all old graphs

Sebastian Lohff, 1 year ago
commit 90cbce9550
1 changed file with 29 additions and 10 deletions
bin/conv.py (+29, -10)

@@ -11,23 +11,42 @@ from backend.crawler import convert_crawl, net_to_json
 from bgpdata.models import CrawlRun
 
 
+def _convert_crawl(crawl):
+    net = convert_crawl(crawl)
+    if net.nodes and net.edges:
+        crawl.graph = net_to_json(net)
+        crawl.save()
+        print("Crawl {} updated".format(crawl.id))
+    else:
+        print("Crawl {} had no nodes or edges, abort".format(crawl.id))
+
+
 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument("-c", "--crawl-id", type=int)
+    parser.add_argument("-a", "--all", default=False, action="store_true")
+    parser.add_argument("-e", "--empty-graph-only", default=False, action="store_true")
     args = parser.parse_args()
 
-    try:
-        crawl = CrawlRun.objects.get(pk=args.crawl_id)
-    except CrawlRun.DoesNotExist:
-        parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+    if args.crawl_id and args.all:
+        parser.error("-c and -a don't work together")
 
-    net = convert_crawl(crawl)
-    if net.nodes and net.edges:
-        crawl.graph = net_to_json(net)
-        crawl.save()
-        print("Crawl updated")
+    if args.crawl_id:
+        try:
+            crawl = CrawlRun.objects.get(pk=args.crawl_id)
+        except CrawlRun.DoesNotExist:
+            parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
+        _convert_crawl(crawl)
+    elif args.all:
+        if args.empty_graph_only:
+            crawls = CrawlRun.objects.filter(graph='')
+        else:
+            crawls = CrawlRun.objects.all()
+
+        for crawl in crawls:
+            _convert_crawl(crawl)
     else:
-        print("Crawl had no nodes or edges, abort")
+        parser.error("Either specify a crawl with -c or use -a for all")
 
 
 if __name__ == '__main__':
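
For reference, a plausible way to invoke the updated tool, based only on the flags added above (the script path and the sample id 17 are assumptions, not part of the commit):

    ./bin/conv.py -c 17     # convert a single CrawlRun by primary key
    ./bin/conv.py -a        # convert every CrawlRun
    ./bin/conv.py -a -e     # convert only CrawlRuns whose graph field is still empty

Combining -c with -a aborts with "-c and -a don't work together", and passing neither aborts with "Either specify a crawl with -c or use -a for all", so exactly one mode must be chosen per run.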
