Remove some prints

Sebastian Lohff 2020-06-10 03:09:16 +02:00
parent 214f9680c0
commit 99c02a84d6
2 changed files with 0 additions and 5 deletions

File 1 of 2:

@@ -20,7 +20,6 @@ class CrawlRunSerializer(serializers.ModelSerializer):
     def to_representation(self, instance):
         data = super().to_representation(instance)
-        print(data)
         for elem in data:
             if "graph" in data and isinstance(data['graph'], str):
                 data['graph'] = json.loads(data['graph'])

File 2 of 2:

@@ -31,7 +31,6 @@ def crawl():
         if asn not in last_seen:
             last_seen[asn] = ASLastSeen(asn=asn)
         asls = last_seen[asn]
-        # print(asn, node)
         asls.online = node['online']
         if node['online']:
             asls.directlyCrawled = node['directly_crawled']
@@ -107,7 +106,6 @@ def get_current_network():
             net.nodes[asls.asn]['online'] = False
     log.info("Crawl done in %.2fs", time.time() - crawl_start)
-    print("{} nodes, {} edges".format(len(net.nodes), len(net.edges)))
     # add id to edges
     for n, (_, _, data) in enumerate(net.edges(data=True)):
@@ -188,5 +186,3 @@ def convert_crawl(crawl):
             path = list(map(int, ann.ASPath.split()))
             net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
             net.nodes[path[-1]]['prefixes'].add(prefix)
-
-    print(net_to_json(net))
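
Note: if the node/edge summary removed from get_current_network() is still useful during crawls, it could go through the module-level log object that the surrounding code already uses (see the log.info call in the same hunk) at debug level, so it stays out of normal output. A minimal sketch, assuming that logger and the net graph visible in the hunks above are in scope:

    # hypothetical debug-level replacement for the removed summary print;
    # "log" and "net" are the names already used in the code above
    log.debug("%d nodes, %d edges", len(net.nodes), len(net.edges))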