import numpy as np


def normalize_file(filename):
    """
    Normalizes file:
    If nodes are not numbered 0 to number_of_nodes - 1, writes a
    normalized version of the dataset to the same directory.
    """
    # Collect every node identifier, skipping "#" comment lines
    nodes = []
    with open(filename, "r") as f:
        for line in f:
            if "#" not in line:
                node_1, node_2 = line.split()
                nodes.append(node_1)
                nodes.append(node_2)
    uniq_nodes = np.unique(nodes)
    # Map each node identifier to an index between 0 and number_of_nodes - 1
    hash_nodes = {}
    for idx, node in enumerate(uniq_nodes):
        hash_nodes[node] = idx
    # Write the re-indexed edge list, again skipping comment lines
    with open(filename[:-4] + "normalize.txt", "w") as g:
        with open(filename, "r") as f:
            for line_f in f:
                if "#" in line_f:
                    continue
                f_node_1, f_node_2 = line_f.split()
                g_node_1 = hash_nodes[f_node_1]
                g_node_2 = hash_nodes[f_node_2]
                g.write(str(g_node_1) + " " + str(g_node_2) + "\n")
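

# Minimal usage sketch; the filename below is a hypothetical example, not part
# of the original dataset. The input is expected to be an edge list with
# optional "#" comment lines and two whitespace-separated node ids per line;
# the normalized copy is written next to it as "<name>normalize.txt".
if __name__ == "__main__":
    normalize_file("graph.txt")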