diff --git a/apiv2/serializers.py b/apiv2/serializers.py
index ffe7639..a6d8735 100644
--- a/apiv2/serializers.py
+++ b/apiv2/serializers.py
@@ -20,7 +20,6 @@ class CrawlRunSerializer(serializers.ModelSerializer):
 
     def to_representation(self, instance):
        data = super().to_representation(instance)
-        print(data)
         for elem in data:
             if "graph" in data and isinstance(data['graph'], str):
                 data['graph'] = json.loads(data['graph'])
diff --git a/backend/crawler.py b/backend/crawler.py
index 22e025a..526c1b5 100644
--- a/backend/crawler.py
+++ b/backend/crawler.py
@@ -31,7 +31,6 @@ def crawl():
         if asn not in last_seen:
             last_seen[asn] = ASLastSeen(asn=asn)
         asls = last_seen[asn]
-        # print(asn, node)
         asls.online = node['online']
         if node['online']:
             asls.directlyCrawled = node['directly_crawled']
@@ -107,7 +106,6 @@ def get_current_network():
             net.nodes[asls.asn]['online'] = False
 
     log.info("Crawl done in %.2fs", time.time() - crawl_start)
-    print("{} nodes, {} edges".format(len(net.nodes), len(net.edges)))
 
     # add id to edges
     for n, (_, _, data) in enumerate(net.edges(data=True)):
@@ -188,5 +186,3 @@ def convert_crawl(crawl):
         path = list(map(int, ann.ASPath.split()))
         net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
         net.nodes[path[-1]]['prefixes'].add(prefix)
-
-    print(net_to_json(net))