Browse Source

Remove some prints

master
Sebastian Lohff 2 years ago
parent
commit
99c02a84d6
  1. 1
      apiv2/serializers.py
  2. 4
      backend/crawler.py

1
apiv2/serializers.py

@@ -20,7 +20,6 @@ class CrawlRunSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
data = super().to_representation(instance)
print(data)
for elem in data:
if "graph" in data and isinstance(data['graph'], str):
data['graph'] = json.loads(data['graph'])

4
backend/crawler.py

@@ -31,7 +31,6 @@ def crawl():
if asn not in last_seen:
last_seen[asn] = ASLastSeen(asn=asn)
asls = last_seen[asn]
# print(asn, node)
asls.online = node['online']
if node['online']:
asls.directlyCrawled = node['directly_crawled']
@@ -107,7 +106,6 @@ def get_current_network():
net.nodes[asls.asn]['online'] = False
log.info("Crawl done in %.2fs", time.time() - crawl_start)
print("{} nodes, {} edges".format(len(net.nodes), len(net.edges)))
# add id to edges
for n, (_, _, data) in enumerate(net.edges(data=True)):
@@ -188,5 +186,3 @@ def convert_crawl(crawl):
path = list(map(int, ann.ASPath.split()))
net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
net.nodes[path[-1]]['prefixes'].add(prefix)
print(net_to_json(net))

Loading…
Cancel
Save