Commit 4566d33d authored by Guillaume RENTON's avatar Guillaume RENTON

Ajout du dossier graph-learning et des fichiers correspondants

parent bf414baa
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 31 14:55:43 2017
@author: guilaume
"""
import networkx as nx
from scipy.optimize import linear_sum_assignment
import numpy as np
from functions import calcul_distance_node,calcul_distance_edge, A_star
from load_gxl import create_graph_from_gxl
import matplotlib.pyplot as plt
from graphfiles import loadGXL
import xml.etree.ElementTree as ET
def calcul_cost_edge(edges_source, edges_target, c_edge_sub=None, c_edge_del_ins=None):
    """Return the optimal assignment cost between two sets of incident edges.

    Builds the standard (n+m)x(n+m) bipartite cost matrix — substitution
    block top-left, deletion/insertion diagonals, free dummy-to-dummy block —
    solves it with the Hungarian algorithm and sums the selected entries.

    Parameters
    ----------
    edges_source, edges_target : dict
        Adjacency dicts mapping a neighbour node to its edge-attribute dict.
    c_edge_sub : float, optional
        Constant edge-substitution cost forwarded to calcul_distance_edge.
    c_edge_del_ins : float, optional
        Edge deletion/insertion cost; defaults to 1 when None.

    Returns
    -------
    float
        Total cost of the optimal edge assignment.
    """
    n = len(edges_source)
    m = len(edges_target)
    del_ins = 1 if c_edge_del_ins is None else c_edge_del_ins
    # Hoisted out of the loops: the original rebuilt these key lists for
    # every single matrix cell.
    keys_source = list(edges_source.keys())
    keys_target = list(edges_target.keys())
    C_star = np.full((n + m, n + m), float('inf'))
    for i in range(n + m):
        for j in range(n + m):
            if i < n and j < m:
                # Substitution of source edge i by target edge j.
                edge_1 = edges_source[keys_source[i]]
                edge_2 = edges_target[keys_target[j]]
                C_star[i][j] = calcul_distance_edge(edge_1, edge_2, c_edge_sub)
            elif i < n and j == m + i:
                # Deletion of source edge i.
                C_star[i][j] = del_ins
            elif j < m and i == n + j:
                # Insertion of target edge j.
                C_star[i][j] = del_ins
            elif i >= n and j >= m:
                # Dummy-to-dummy assignments are free.
                C_star[i][j] = 0
    row_ind, col_ind = linear_sum_assignment(C_star)
    return C_star[row_ind, col_ind].sum()
def create_cost_matrix(graph_source, graph_target, c_edge_sub=None, c_edge_del_ins=None, c_node_sub=None, c_node_del_ins=None):
    """Build the (n+m)x(n+m) bipartite GED cost matrices for two graphs.

    Parameters
    ----------
    graph_source, graph_target : graph
        Graphs exposing nodes() and dict-like adjacency indexing.
        NOTE(review): node attributes are read via graph.node[...], the
        networkx 1.x API — confirm the networkx version in use.
    c_edge_sub, c_edge_del_ins, c_node_sub, c_node_del_ins : float, optional
        Edit-operation costs; del/ins costs default to 1 when None.

    Returns
    -------
    (C_star_node, C_star, C_star_edge)
        Node-cost matrix, combined matrix (node + edge), edge-cost matrix.
    """
    # Hoisted: the original rebuilt list(graph.nodes()) inside every branch
    # of the double loop.
    nodes_source = list(graph_source.nodes())
    nodes_target = list(graph_target.nodes())
    n = len(nodes_source)
    m = len(nodes_target)
    node_del_ins = 1 if c_node_del_ins is None else c_node_del_ins
    C_star_node = np.full((n + m, n + m), float('inf'))
    C_star_edge = np.full((n + m, n + m), 0.0)
    for i in range(n + m):
        for j in range(n + m):
            if i < n and j < m:
                # Substitution: node distance plus optimal matching of the
                # incident edges.
                C_star_node[i][j] = calcul_distance_node(graph_source.node[nodes_source[i]], graph_target.node[nodes_target[j]], c_node_sub=c_node_sub)
                C_star_edge[i][j] = calcul_cost_edge(graph_source[nodes_source[i]], graph_target[nodes_target[j]], c_edge_sub=c_edge_sub, c_edge_del_ins=c_edge_del_ins)
            elif i < n and j == m + i:
                # Deleting node i also deletes all of its incident edges.
                C_star_node[i][j] = node_del_ins
                degree = len(graph_source[nodes_source[i]])
                C_star_edge[i][j] = degree if c_edge_del_ins is None else degree * c_edge_del_ins
            elif j < m and i == n + j:
                # Inserting node j also inserts all of its incident edges.
                C_star_node[i][j] = node_del_ins
                degree = len(graph_target[nodes_target[j]])
                C_star_edge[i][j] = degree if c_edge_del_ins is None else degree * c_edge_del_ins
            elif i >= n and j >= m:
                # Dummy-to-dummy assignments are free.
                C_star_node[i][j] = 0
    C_star = C_star_node + C_star_edge
    return C_star_node, C_star, C_star_edge
def calcul_adjacency_extended(graph_source, graph_target):
    """Return the (n+m)x(n+m) zero-padded adjacency matrices of both graphs.

    Each matrix has a 1 at (i, j) when the i-th and j-th nodes (in
    nodes() order) are connected in either direction, and 0 elsewhere;
    the extra rows/columns (dummy nodes) stay 0.

    Returns
    -------
    (adjacency_source, adjacency_target) : numpy arrays of shape (n+m, n+m)
    """
    nodes_source = list(graph_source.nodes())
    nodes_target = list(graph_target.nodes())
    n = len(nodes_source)
    m = len(nodes_target)
    # Hoisted: set membership is O(1), while the original scanned
    # graph.edges() for every (i, j) pair.
    edges_source = set(graph_source.edges())
    edges_target = set(graph_target.edges())
    adjacency_source = np.zeros((n + m, n + m))
    adjacency_target = np.zeros((n + m, n + m))
    for i in range(n + m):
        for j in range(n + m):
            if i < n and j < n:
                u, v = nodes_source[i], nodes_source[j]
                # Undirected check: either orientation counts.
                if (u, v) in edges_source or (v, u) in edges_source:
                    adjacency_source[i][j] = 1
            if i < m and j < m:
                u, v = nodes_target[i], nodes_target[j]
                if (u, v) in edges_target or (v, u) in edges_target:
                    adjacency_target[i][j] = 1
    return adjacency_source, adjacency_target
def calcul_Munkres(graph_source, graph_target, C_nodes, C_star, c_edge_del_ins=1):
    """Solve the LSAP on C_star and derive upper/lower GED bounds.

    Parameters
    ----------
    C_nodes : array
        Node-only cost matrix (as returned by create_cost_matrix).
    C_star : array
        Combined node+edge cost matrix the assignment is solved on.
    c_edge_del_ins : float, optional
        Edge deletion/insertion cost; treated as 1 when None.

    Returns
    -------
    (upper_bound, lower_bound, hungarian)
        hungarian is the (row_ind, col_ind) pair from linear_sum_assignment.
    """
    hungarian = linear_sum_assignment(C_star)
    row_ind, col_ind = hungarian
    if c_edge_del_ins is None:  # original compared with `== None`
        c_edge_del_ins = 1
    # Both bounds take the assigned node costs as-is.
    upper_bound = 0.0
    lower_bound = 0.0
    for r, c in zip(row_ind, col_ind):
        upper_bound += C_nodes[r][c]
        lower_bound += C_nodes[r][c]
    adjacency_source, adjacency_target = calcul_adjacency_extended(graph_source, graph_target)
    size = len(row_ind)
    for i in range(size):
        for j in range(i + 1, size):
            diff = abs(adjacency_source[i][j] - adjacency_target[col_ind[i]][col_ind[j]])
            upper_bound += diff * c_edge_del_ins
            # The lower bound counts only half of each edge disagreement.
            lower_bound += 0.5 * diff * c_edge_del_ins
    return upper_bound, lower_bound, hungarian
def create_matrix_and_permute_it(graph_source, graph_target, c_edge_sub=None, c_edge_del_ins=None, c_node_sub=None, c_node_del_ins=None):
    """Build the node/edge cost matrices, solve the LSAP, and return the
    column-permuted matrices together with the Munkres bounds.

    Returns (C_node_permuted, C_edge_permuted, upper_bound, lower_bound,
    permutation), where permutation is the (row_ind, col_ind) pair.
    """
    C_node, C_star, C_edge = create_cost_matrix(
        graph_source, graph_target,
        c_node_sub=c_node_sub, c_node_del_ins=c_node_del_ins,
        c_edge_sub=c_edge_sub, c_edge_del_ins=c_edge_del_ins)
    upper_bound, lower_bound, permutation = calcul_Munkres(
        graph_source, graph_target, C_node, C_star,
        c_edge_del_ins=c_edge_del_ins)
    # Reorder the columns by the optimal assignment.
    columns = permutation[1]
    return (C_node[:, columns], C_edge[:, columns],
            upper_bound, lower_bound, permutation)
if __name__ == "__main__":
    # Read the dataset index and collect the GXL file names it references.
    path = "/home/renton/Documents/Thèse/Code/graph-matching-toolkit-master/data/Letter/"
    filename = "small.xml"
    tree = ET.parse(path + filename)
    liste_file = []
    for node in tree.findall(".//print"):
        fichier = node.attrib["file"]
        liste_file.append(fichier)
    print(liste_file)
    # Pairwise bipartite GED bounds over every (i, j) pair.
    for i in range(len(liste_file)):
        for j in range(len(liste_file)):
            graph1 = create_graph_from_gxl(path + liste_file[i])
            graph2 = create_graph_from_gxl(path + liste_file[j])
            # BUG FIX: create_cost_matrix returns three matrices; the
            # original two-name unpacking raised ValueError at runtime.
            C_node, C_star, _ = create_cost_matrix(graph1, graph2, c_node_sub=None, c_node_del_ins=4, c_edge_sub=None, c_edge_del_ins=1)
            upper, lower, permutation = calcul_Munkres(graph1, graph2, C_node, C_star, c_edge_del_ins=1)
            print(upper, lower)
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 6 15:44:11 2017
@author: guilaume
"""
import pickle
from Bipartite import calcul_Munkres,create_cost_matrix
from load_gxl import create_graph_from_gxl
if __name__ == "__main__":
    # Evaluate the bipartite upper bound against the reference distances
    # stored in the pickles; by the indexing below, item [1] is the
    # reference distance and items [4]/[5] the two graph file names.
    with open("LETTER_HIGH/base_app.p","rb") as fp:
        app = pickle.load(fp, encoding="bytes")
    with open("LETTER_HIGH/base_valid.p","rb") as fp:
        valid = pickle.load(fp, encoding="bytes")
    print(len(app))
    print(len(valid))
    MSE = 0
    path = "../Datasets/Letter/HIGH/"
    for fileapp in app:
        graph1 = create_graph_from_gxl(path + fileapp[4])
        graph2 = create_graph_from_gxl(path + fileapp[5])
        # BUG FIX: create_cost_matrix returns three matrices; the original
        # two-name unpacking raised ValueError at runtime.
        c_star_node, c_star, _ = create_cost_matrix(graph1, graph2)
        cost, lower, h = calcul_Munkres(graph1, graph2, c_star_node, c_star)
        MSE += (cost - fileapp[1])**2
    print(MSE / len(app))
    MSE = 0
    for filevalid in valid:
        graph1 = create_graph_from_gxl(path + filevalid[4])
        graph2 = create_graph_from_gxl(path + filevalid[5])
        c_star_node, c_star, _ = create_cost_matrix(graph1, graph2)
        cost, lower, h = calcul_Munkres(graph1, graph2, c_star_node, c_star)
        MSE += (cost - filevalid[1])**2
    print(MSE / len(valid))
\ No newline at end of file
from costfunctions import ConstantCostFunction, RiesenCostFunction
from costfunctions import NeighboorhoodCostFunction
from bipartiteGED import computeBipartiteCostMatrix, getOptimalMapping
from scipy.optimize import linear_sum_assignment
from graphfiles import loadGXL
def ged(G1, G2, method='Riesen', rho=None, varrho=None,
        cf=ConstantCostFunction(1, 3, 1, 3),
        solver=linear_sum_assignment):
    """Compute the Graph Edit Distance between G1 and G2.

    If rho/varrho (node mappings G1->G2 and G2->G1, indexed by node id)
    are not both supplied, they are computed with the bipartite heuristic
    selected by `method` ('Riesen', 'Neighboorhood' or 'Basic').

    Returns (distance, rho, varrho, C) where C is the bipartite cost
    matrix, or None when the mapping was supplied by the caller.

    Raises NameError for an unknown method.
    """
    # BUG FIX: C was only assigned inside the branch below, so returning
    # it raised NameError whenever the caller supplied both rho and varrho.
    C = None
    if (rho is None) or (varrho is None):
        if method == 'Riesen':
            cf_bp = RiesenCostFunction(cf, lsap_solver=solver)
        elif method == 'Neighboorhood':
            cf_bp = NeighboorhoodCostFunction(cf, lsap_solver=solver)
        elif method == 'Basic':
            cf_bp = cf
        else:
            raise NameError('Non existent method ')
        C = computeBipartiteCostMatrix(G1, G2, cf_bp)
        rho, varrho = getOptimalMapping(C, lsap_solver=solver)

    n = G1.number_of_nodes()
    m = G2.number_of_nodes()
    dist = 0
    # Node substitutions/deletions driven by rho; an image >= m is a dummy
    # node of G2, i.e. a deletion.
    for i in G1.nodes():
        phi_i = rho[i]
        if phi_i >= m:
            dist += cf.cnd(i, G1)
        else:
            dist += cf.cns(i, phi_i, G1, G2)
    # Node insertions: G2 nodes mapped onto a dummy node of G1.
    for j in G2.nodes():
        phi_j = varrho[j]
        if phi_j >= n:
            dist += cf.cni(j, G2)
    # Edge costs induced by the node mapping, G1 side.
    for e in G1.edges(data=True):
        i, j = e[0], e[1]
        phi_i = rho[i]
        phi_j = rho[j]
        if (phi_i < m) and (phi_j < m):
            # Idiom: membership test replaces the original filter/len scan
            # over the neighbours of phi_i (same truthiness).
            if phi_j in G2[phi_i]:
                e2 = [phi_i, phi_j, G2[phi_i][phi_j]]
                # Substituting an edge may cost more than deleting it and
                # re-inserting the target edge; keep the cheaper option.
                dist += min(cf.ces(e, e2, G1, G2),
                            cf.ced(e, G1) + cf.cei(e2, G2))
            else:
                dist += cf.ced(e, G1)
        else:
            dist += cf.ced(e, G1)
    # Edge insertions: G2 edges with no counterpart in G1.
    for e in G2.edges(data=True):
        i, j = e[0], e[1]
        phi_i = varrho[i]
        phi_j = varrho[j]
        if (phi_i < n) and (phi_j < n):
            if phi_j not in G1[phi_i]:
                dist += cf.cei(e, G2)
        else:
            # NOTE(review): charging a deletion cost (ced) for a G2 edge
            # looks suspicious — cei may have been intended; preserved
            # as-is (both are equal under ConstantCostFunction defaults).
            dist += cf.ced(e, G2)
    return dist, rho, varrho, C
if __name__ == "__main__":
    # Load a handful of sample molecules for manual experimentation; the
    # pairwise GED print statements from earlier experiments were removed
    # as dead (Python 2) code.
    graph1 = loadGXL("../Datasets/gdc-c1/acyclic/dimethyl_ether.gxl")
    graph2 = loadGXL("../Datasets/gdc-c1/acyclic/dimethyl_peroxide.gxl")
    graph3 = loadGXL("../Datasets/gdc-c1/acyclic/dimethyl_sulfide.gxl")
    graph4 = loadGXL("../Datasets/gdc-c1/acyclic/methyl_nonyl_sulfide.gxl")
    graph5 = loadGXL("../Datasets/gdc-c1/acyclic/di-tert-butyl_disulfide.gxl")
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 11 3 17:15:26 2017
@author: guillaume
"""
import pickle
from sklearn.svm import SVR
import numpy as np
from tqdm import tqdm
import time
def computeSVR(path):
    """Train an SVR on (feature, feature) pairs and report test MSE/MRE.

    Parameters
    ----------
    path : str
        Directory containing base_app.p, base_valid.p and base_test.p
        pickles. By the indexing below, each entry exposes [1] = target
        value and [2], [3] = the two input features.
    """
    with open(path + "base_app.p", "rb") as fp:
        baseapp = pickle.load(fp, encoding="bytes")
    with open(path + "base_valid.p", "rb") as fp:
        basevalid = pickle.load(fp, encoding="bytes")
    with open(path + "base_test.p", "rb") as fp:
        basetest = pickle.load(fp, encoding="bytes")
    X = np.full((len(baseapp) + len(basevalid), 2), 0.0)
    y = np.full((len(baseapp) + len(basevalid)), 0.0)
    for i in tqdm(range(len(baseapp))):
        X[i][0] = baseapp[i][2]
        X[i][1] = baseapp[i][3]
        y[i] = baseapp[i][1]
    # BUG FIX: X/y are sized for app + valid, but the original never filled
    # the validation rows, so the SVR was trained on zero-padded samples.
    offset = len(baseapp)
    for i in tqdm(range(len(basevalid))):
        X[offset + i][0] = basevalid[i][2]
        X[offset + i][1] = basevalid[i][3]
        y[offset + i] = basevalid[i][1]
    t = time.time()
    clf = SVR(C=16, gamma=0.03125)
    clf.fit(X, y)
    print("fit done in ", time.time() - t)
    X = np.full((len(basetest), 2), 0.0)
    y = np.full((len(basetest)), 0.0)
    for i in tqdm(range(len(basetest))):
        X[i][0] = basetest[i][2]
        X[i][1] = basetest[i][3]
        y[i] = basetest[i][1]
    svr_prediction = clf.predict(X)
    mse = 0
    mre = 0
    for i in tqdm(range(len(svr_prediction))):
        mse += (svr_prediction[i] - y[i])**2
        if y[i] != 0:
            # Relative error, guarded against division by zero.
            mre += abs((svr_prediction[i] - y[i]) / y[i])
        else:
            mre += abs(svr_prediction[i] - y[i])
    print("Valeur du MSE : ", mse/len(basetest))
    print("Valeur du MRE : ", 100*mre/len(basetest))
if __name__ == "__main__":
    # Run the SVR experiment on the Letter HIGH dataset.
    computeSVR("LETTER_HIGH/")
import numpy as np
from scipy.optimize import linear_sum_assignment
from costfunctions import ConstantCostFunction
def computeBipartiteCostMatrix(G1, G2, cf=ConstantCostFunction(1, 3, 1, 3)):
    """Build the (n+m)x(n+m) bipartite node cost matrix for G1 vs G2.

    Layout: substitutions top-left, deletions on the top-right diagonal,
    insertions on the bottom-left diagonal, a free zero block bottom-right,
    infinity everywhere else.

    NOTE(review): indexing C[u, v] assumes node ids are the integers
    0..n-1 — confirm against the graph loaders used upstream.
    """
    n = G1.number_of_nodes()
    m = G2.number_of_nodes()
    size = n + m
    C = np.full((size, size), np.inf)
    C[n:, m:] = 0  # dummy-to-dummy assignments are free
    for u in G1.nodes():
        # Substitution row for node u, then its deletion cell.
        for v in G2.nodes():
            C[u, v] = cf.cns(u, v, G1, G2)
        C[u, m + u] = cf.cnd(u, G1)
    for v in G2.nodes():
        C[n + v, v] = cf.cni(v, G2)
    return C
def getOptimalMapping(C, lsap_solver=linear_sum_assignment):
    """Solve the LSAP on cost matrix C.

    Returns (rho, varrho): rho[i] is the column assigned to row i, and
    varrho is the inverse permutation (varrho[k] is the row assigned to
    column k).
    """
    assignment = lsap_solver(C)
    cols = assignment[1]
    # Invert the permutation: sorting the column indices reorders the row
    # indices so that position k holds the row mapped to column k.
    return cols, assignment[0][np.argsort(cols)]
import numpy as np
from scipy.optimize import linear_sum_assignment
class ConstantCostFunction:
    """Symmetric constant cost function for the six edit operations.

    Substitution costs apply only when the 'label' attributes differ;
    deletion and insertion share one constant for nodes and one for edges.
    """

    def __init__(self, cns, cni, ces, cei):
        # Node substitution, node deletion/insertion, edge substitution,
        # edge deletion/insertion constants.
        self.cns_ = cns
        self.cni_ = self.cnd_ = cni
        self.ces_ = ces
        self.cei_ = self.ced_ = cei

    def cns(self, node_u, node_v, g1, g2):
        """Cost of substituting node_u of g1 by node_v of g2 (0 when their
        'label' attributes match)."""
        labels_differ = g1.node[node_u]['label'] != g2.node[node_v]['label']
        return labels_differ * self.cns_

    def cnd(self, u, G1):
        """Constant node-deletion cost."""
        return self.cnd_

    def cni(self, v, G2):
        """Constant node-insertion cost."""
        return self.cni_

    def ces(self, e1, e2, G1, G2):
        """Cost of substituting edge e1 by e2 (0 when labels match).

        TODO(review): test with non-symbolic attributes via __eq__ —
        carried over from the original (French) comment.
        """
        labels_differ = e1[2]['label'] != e2[2]['label']
        return labels_differ * self.ces_

    def ced(self, e1, G1):
        """Constant edge-deletion cost."""
        return self.ced_

    def cei(self, e2, G2):
        """Constant edge-insertion cost."""
        return self.cei_
class RiesenCostFunction():
    """Wraps a base cost function so that each node cost also includes the
    optimal assignment cost of the incident edges (Riesen's bipartite
    approximation)."""

    def __init__(self, cf, lsap_solver=linear_sum_assignment):
        self.cf_ = cf
        self.lsap_solver_ = lsap_solver

    def cns(self, u, v, G1, G2):
        """Substitution cost of node u (G1) by node v (G2): base node cost
        plus the LSAP cost over their incident edges."""
        nbrs_u = list(G1[u])
        nbrs_v = list(G2[v])
        n = len(nbrs_u)
        m = len(nbrs_v)
        sub_C = np.full([n + m, n + m], np.inf)
        sub_C[n:, m:] = 0  # dummy-to-dummy block is free
        for i, nbr_u in enumerate(nbrs_u):
            e1 = [u, nbr_u, G1[u][nbr_u]]
            # Edge substitution row, then the deletion diagonal cell.
            for j, nbr_v in enumerate(nbrs_v):
                sub_C[i, j] = self.cf_.ces(e1, [v, nbr_v, G2[v][nbr_v]], G1, G2)
            sub_C[i, m + i] = self.cf_.ced(e1, G1)
        for j, nbr_v in enumerate(nbrs_v):
            # Edge insertion diagonal.
            sub_C[n + j, j] = self.cf_.cei([v, nbr_v, G2[v][nbr_v]], G2)
        rows, cols = self.lsap_solver_(sub_C)
        return self.cf_.cns(u, v, G1, G2) + np.sum(sub_C[rows, cols])

    def cnd(self, u, G1):
        """Deletion of u plus deletion of all of its incident edges."""
        edge_costs = sum(self.cf_.ced([u, nbr, G1[u][nbr]], G1) for nbr in G1[u])
        return self.cf_.cnd(u, G1) + edge_costs

    def cni(self, v, G2):
        """Insertion of v plus insertion of all of its incident edges."""
        edge_costs = sum(self.cf_.cei([v, nbr, G2[v][nbr]], G2) for nbr in G2[v])
        return self.cf_.cni(v, G2) + edge_costs
class NeighboorhoodCostFunction():
    """Like RiesenCostFunction, but each incident-edge cost also includes
    the cost of the node at the edge's far end."""

    def __init__(self, cf, lsap_solver=linear_sum_assignment):
        self.cf_ = cf
        self.lsap_solver_ = lsap_solver

    def cns(self, u, v, G1, G2):
        """Substitution cost of node u (G1) by node v (G2), incident edges
        and their far-end nodes included."""
        nbrs_u = list(G1[u])
        nbrs_v = list(G2[v])
        n = len(nbrs_u)
        m = len(nbrs_v)
        sub_C = np.full([n + m, n + m], np.inf)
        sub_C[n:, m:] = 0  # dummy-to-dummy block is free
        for i, nbr_u in enumerate(nbrs_u):
            e1 = [u, nbr_u, G1[u][nbr_u]]
            for j, nbr_v in enumerate(nbrs_v):
                e2 = [v, nbr_v, G2[v][nbr_v]]
                # Edge substitution plus substitution of the far-end nodes.
                sub_C[i, j] = (self.cf_.ces(e1, e2, G1, G2)
                               + self.cf_.cns(nbr_u, nbr_v, G1, G2))
            # Deleting the edge also deletes its far-end node.
            sub_C[i, m + i] = self.cf_.ced(e1, G1) + self.cf_.cnd(nbr_u, G1)
        for j, nbr_v in enumerate(nbrs_v):
            # Inserting the edge also inserts its far-end node.
            sub_C[n + j, j] = (self.cf_.cei([v, nbr_v, G2[v][nbr_v]], G2)
                               + self.cf_.cni(nbr_v, G2))
        rows, cols = self.lsap_solver_(sub_C)
        return self.cf_.cns(u, v, G1, G2) + np.sum(sub_C[rows, cols])

    def cnd(self, u, G1):
        """Deletion of u plus deletion of all of its incident edges."""
        edge_costs = sum(self.cf_.ced([u, nbr, G1[u][nbr]], G1) for nbr in G1[u])
        return self.cf_.cnd(u, G1) + edge_costs

    def cni(self, v, G2):
        """Insertion of v plus insertion of all of its incident edges."""
        edge_costs = sum(self.cf_.cei([v, nbr, G2[v][nbr]], G2) for nbr in G2[v])
        return self.cf_.cni(v, G2) + edge_costs
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 27 13:39:14 2017
@author: guilaume
"""
import networkx as nx
import xml.etree.ElementTree as ET
from load_gxl import create_graph_from_gxl
import sys
import math
import time
def calcul_distance_edge(edge_source, edge_target, c_edge_sub=None):
    """Distance between two edge-attribute dicts.

    With c_edge_sub: constant cost when the first attribute value differs
    (0 otherwise), and c_edge_sub for attribute-less edges. Without it:
    Euclidean distance over all attribute values (must be float-castable).

    Returns 0 when edge_source is not a dict. Exits the process when the
    two attribute-key sets differ.
    """
    # Exact-type check kept from the original (`type(...) == dict`).
    if type(edge_source) is not dict:
        return 0
    if not edge_source.keys() == edge_target.keys():
        # BUG FIX: the original printed undefined names `edge1`/`edge2`,
        # raising NameError before ever reaching sys.exit(); the message
        # typos ("edgess", "node 2") are fixed as part of the same defect.
        print("Error : edges attributes aren't the same\n edge 1 : ",
              edge_source.keys(), "\n edge 2 ", edge_target.keys())
        sys.exit()
    if c_edge_sub is not None:
        keys = list(edge_source.keys())
        if keys:
            return c_edge_sub if edge_source[keys[0]] != edge_target[keys[0]] else 0
        # No attributes at all: charge the constant substitution cost.
        return c_edge_sub
    d = 0
    for key in edge_source.keys():
        d += (float(edge_source[key]) - float(edge_target[key]))**2
    return math.sqrt(d)
def calcul_distance_node(node1, node2, c_node_sub=None):
    """Distance between two node-attribute dicts.

    With c_node_sub: constant cost when the first attribute value differs,
    0 otherwise. Without it: Euclidean distance over all attribute values
    (values must be convertible to float).

    Exits the process when the two attribute-key sets differ.
    """
    if not node1.keys() == node2.keys():
        print("Error : nodes attributes aren't the same\n node 1 : ",
              node1.keys(), "\n node 2 ", node2.keys())
        sys.exit()
    if c_node_sub is not None:  # original compared with `!= None`
        keys = list(node1.keys())
        # NOTE(review): unlike calcul_distance_edge, an attribute-less node
        # would raise IndexError here — callers appear to always pass
        # labelled nodes; confirm.
        return c_node_sub if node1[keys[0]] != node2[keys[0]] else 0
    d = 0
    for key in node1.keys():
        d += (float(node1[key]) - float(node2[key]))**2
    return math.sqrt(d)
def A_star(graph_source, graph_target, c_node_sub = None, c_node_del_ins = None, c_edge_sub = None, c_edge_del_ins = None):
    """Exact graph edit distance search by best-first expansion of partial
    edit paths (A*-style with no heuristic term).

    OPEN holds partial edit paths; each path is a list of
    (source_node_or_"eps", target_node_or_"eps", cumulative_cost) triples,
    where "eps" stands for a deletion (source side) or insertion (target
    side). Source nodes are processed in the fixed order of
    liste_node_source. Returns the first complete path (all target nodes
    covered and all source nodes mapped) selected as current minimum.

    All del/ins costs default to 1 when the corresponding parameter is
    None. NOTE(review): graph.node[...] is the networkx 1.x attribute API.
    """
    OPEN = []
    liste_node_source = list(graph_source.nodes())
    # Seed OPEN with every possible mapping of the first source node...
    for node in graph_target.nodes():
        tmp = calcul_distance_node(graph_source.node[liste_node_source[0]],graph_target.node[node], c_node_sub=c_node_sub)
        OPEN.append([(liste_node_source[0], node, tmp)])
    # ...plus its deletion ("eps").
    eps = [(liste_node_source[0],"eps",1.0)] if c_node_del_ins is None else [(liste_node_source[0],"eps",c_node_del_ins)]
    OPEN.append(eps)
    complete = set(graph_target.nodes())
    while(1):
        # Pick the partial path whose cumulative cost (last triple) is minimal.
        p = OPEN.index(min(OPEN, key=lambda t:t[-1][2]))
        # Target nodes already covered by this path.
        liste_node = set(item[1] for item in OPEN[p])
        lambda_min = list(OPEN[p])
        # Goal test: every target node covered and every source node handled.
        if complete.issubset(liste_node) and len(lambda_min) >= len(liste_node_source):
            return OPEN[p]
        OPEN.remove(lambda_min)
        if len(lambda_min) < len(liste_node_source):
            # Expand by assigning the next unprocessed source node.
            for node_target in graph_target.nodes():
                if node_target not in liste_node:
                    # Node substitution cost...
                    cost = lambda_min[-1][-1] + calcul_distance_node(graph_source.node[liste_node_source[len(lambda_min)]],graph_target.node[node_target], c_node_sub=c_node_sub)
                    # ...plus the induced edge costs against each prior match.
                    for matching in lambda_min:
                        edge_source_1 = (liste_node_source[len(lambda_min)],matching[0])
                        edge_source_2 = (matching[0],liste_node_source[len(lambda_min)])
                        edge_target_1 = (node_target, matching[1])
                        edge_target_2 = (matching[1],node_target)
                        if (edge_source_1 in graph_source.edges() or edge_source_2 in graph_source.edges()) and (edge_target_1 in graph_target.edges() or edge_target_2 in graph_target.edges()):
                            # Edge exists on both sides: edge substitution.
                            edge_source = edge_source_1 if edge_source_1 in graph_source.edges() else edge_source_2
                            edge_target = edge_target_1 if edge_target_1 in graph_target.edges() else edge_target_2
                            cost += calcul_distance_edge(graph_source[edge_source[0]][edge_source[1]], graph_target[edge_target[0]][edge_target[1]], c_edge_sub=c_edge_sub)
                        elif edge_source_1 not in graph_source.edges() and edge_source_2 not in graph_source.edges() and edge_target_1 not in graph_target.edges() and edge_target_2 not in graph_target.edges():
                            # Edge on neither side: nothing to pay.
                            cost += 0
                        else :
                            # Edge on exactly one side: deletion/insertion.
                            cost = cost + 1 if c_edge_del_ins is None else cost + c_edge_del_ins
                    lambda_tmp = list(lambda_min)
                    lambda_tmp.append((liste_node_source[len(lambda_min)], node_target, cost))
                    OPEN.append(lambda_tmp)
            # Also branch on deleting the next source node ("eps")...
            lambda_tmp = list(lambda_min)
            cost = lambda_min[-1][2]+1 if c_node_del_ins is None else lambda_min[-1][2]+c_node_del_ins
            for matching in lambda_min:
                # ...which also deletes its already-known incident edges.
                edge_source_1 = (liste_node_source[len(lambda_min)],matching[0])
                edge_source_2 = (matching[0],liste_node_source[len(lambda_min)])
                if edge_source_1 in graph_source.edges() or edge_source_2 in graph_source.edges():
                    cost =cost + 1 if c_edge_del_ins is None else cost + c_edge_del_ins
            lambda_tmp.append((liste_node_source[len(lambda_min)], "eps", cost))
            OPEN.append(lambda_tmp)
        else:
            # All source nodes handled: insert the remaining target nodes.
            for node_target in graph_target.nodes():
                if node_target not in liste_node:
                    lambda_tmp = list(lambda_min)
                    cost = lambda_min[-1][2]+1 if c_node_del_ins is None else lambda_min[-1][2]+c_node_del_ins
                    for matching in lambda_min:
                        # Inserting the node also inserts its incident edges.
                        edge_target_1 = (node_target, matching[1])
                        edge_target_2 = (matching[1],node_target)
                        if edge_target_1 in graph_target.edges() or edge_target_2 in graph_target.edges():
                            cost = cost + 1 if c_edge_del_ins is None else cost + c_edge_del_ins
                    lambda_tmp.append(("eps",node_target, cost))
                    OPEN.append(lambda_tmp)
if __name__ == "__main__":
    # Read the dataset index and collect the GXL file names it references.
    path ="/home/renton/Documents/Thèse/Code/graph-matching-toolkit-master/data/Letter/"
    filename = "small.xml"
    tree = ET.parse(path + filename)
    liste_file = [node.attrib["file"] for node in tree.findall(".//print")]
    print(liste_file)
    # Pairwise exact GED; A_star is called with the larger graph first
    # (the second graph when sizes are equal or the first is smaller).
    for i in range(len(liste_file)):
        for j in range(len(liste_file)):
            graph1 = create_graph_from_gxl(path + liste_file[i])
            graph2 = create_graph_from_gxl(path + liste_file[j])
            if graph1.number_of_nodes() > graph2.number_of_nodes():
                best_path = A_star(graph1, graph2, c_node_sub=None, c_node_del_ins=4, c_edge_sub=None, c_edge_del_ins=1)
            else:
                best_path = A_star(graph2, graph1, c_node_sub=None, c_node_del_ins=4, c_edge_sub=None, c_edge_del_ins=1)
            # The cumulative cost of the returned path's last triple is
            # the edit distance.
            print("result", best_path[-1][-1])