Skip to content

Commit

Permalink
Merge branch 'PMPY-2081' into 'integration'
Browse files Browse the repository at this point in the history
PMPY-2081 Major fix for WOFLAN non-determinism

Closes PMPY-2081

See merge request process-mining/pm4py/pm4py-core!977
  • Loading branch information
fit-alessandro-berti committed Apr 3, 2023
2 parents 1cb2d2f + aaa6018 commit 5fd45bd
Show file tree
Hide file tree
Showing 7 changed files with 77 additions and 52 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -60,9 +60,9 @@ def remove_subtree(tree, n):

initial_mark = helper.convert_marking(net, initial_marking, original_net)
j = 0
unprocessed_nodes = set()
unprocessed_nodes = list()
G.add_node(j, marking=initial_mark)
unprocessed_nodes.add(j)
unprocessed_nodes.append(j)
j += 1

processed_nodes = set()
Expand All @@ -72,34 +72,37 @@ def remove_subtree(tree, n):
if check_if_marking_already_in_processed_nodes(n, processed_nodes):
processed_nodes.add(n)
elif is_m_smaller_than_other(G.nodes[n]['marking'], processed_nodes):
G.remove_edge(next(G.predecessors(n)), n)
predecessors = sorted(list(G.predecessors(n)))
G.remove_edge(predecessors[0], n)
G.remove_node(n)
elif is_m_greater_than_other(G.nodes[n]['marking'], processed_nodes):
m2 = G.nodes[n]['marking'].copy()
ancestor_bool = False
for ancestor in nx.ancestors(G, n):
ancestors = sorted(list(nx.ancestors(G, n)))
for ancestor in ancestors:
if is_m_greater_than_other(G.nodes[n]['marking'], [ancestor]):
i = 0
while i < len(G.nodes[n]['marking']):
if G.nodes[ancestor]['marking'][i] < G.nodes[n]['marking'][i]:
m2[i] = np.inf
i += 1
n1 = None
for ancestor in nx.ancestors(G, n):
for ancestor in ancestors:
if all(np.less_equal(G.nodes[ancestor]['marking'], m2)):
n1 = get_first_smaller_marking_on_path(n, m2)
break
if n1 != None:
ancestor_bool = True
G.nodes[n1]['marking'] = m2.copy()
subtree = nx.bfs_tree(G, n1)
subtree = sorted(list(nx.bfs_tree(G, n1)))
for node in subtree:
if node in processed_nodes:
processed_nodes.remove(node)
if node in unprocessed_nodes:
unprocessed_nodes.remove(node)
del unprocessed_nodes[unprocessed_nodes.index(node)]
G = remove_subtree(G, n1)
unprocessed_nodes.add(n1)
if not n1 in unprocessed_nodes:
unprocessed_nodes.append(n1)
processed_nodes_copy = copy(processed_nodes)
for node in processed_nodes_copy:
if node in G.nodes:
Expand All @@ -109,18 +112,22 @@ def remove_subtree(tree, n):
if node in processed_nodes:
processed_nodes.remove(node)
if node in unprocessed_nodes:
unprocessed_nodes.remove(node)
del unprocessed_nodes[unprocessed_nodes.index(node)]
remove_subtree(G, node)
G.remove_node(node)
if not ancestor_bool:
unprocessed_nodes.add(n)
if n not in unprocessed_nodes:
unprocessed_nodes.append(n)
else:
for el in helper.enabled_markings(firing_dict, req_dict, G.nodes[n]['marking']):
enabled_markings = helper.enabled_markings(firing_dict, req_dict, G.nodes[n]['marking'])
for el in enabled_markings:
G.add_node(j, marking=el[0])
G.add_edge(n, j, transition=el[1])
unprocessed_nodes.add(j)
if j not in unprocessed_nodes:
unprocessed_nodes.append(j)
j += 1
processed_nodes.add(n)

return (G, firing_dict, req_dict)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,12 +42,13 @@ def construct_tree(net, initial_marking):
if new_arc:
break
if new_arc:
m3 = np.zeros(len(list(net.places)))
for place in list(net.places):
if check_for_smaller_marking(m2, coverability_graph, list(net.places).index(place), m, look_up_indices):
m3[list(net.places).index(place)] = np.inf
lplaces = sorted(list(net.places), key=lambda x: x.name)
m3 = np.zeros(len(lplaces))
for place in lplaces:
if check_for_smaller_marking(m2, coverability_graph, lplaces.index(place), m, look_up_indices):
m3[lplaces.index(place)] = np.inf
else:
m3[list(net.places).index(place)] = m2[0][list(net.places).index(place)]
m3[lplaces.index(place)] = m2[0][lplaces.index(place)]
coverability_graph.add_node(j, marking=m3)
coverability_graph.add_edge(m, j, transition=m2[1])
look_up_indices[np.array2string(m3)] = j
Expand Down
19 changes: 12 additions & 7 deletions pm4py/algo/analysis/woflan/graphs/utility.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ def compute_incidence_matrix(net):
m = len(net.places)
C = np.zeros((m, n))
i = 0
transition_list = list(net.transitions)
place_list = list(net.places)
transition_list = sorted(list(net.transitions), key=lambda x: x.name)
place_list = sorted(list(net.places), key=lambda x: x.name)
while i < n:
t = transition_list[i]
for in_arc in t.in_arcs:
Expand All @@ -35,14 +35,15 @@ def split_incidence_matrix(matrix, net):
of the transition
"""
transition_dict = {}
lst_transitions = sorted(list(net.transitions), key=lambda x: x.name)
i = 0
while i < len(net.transitions):
transition_dict[list(net.transitions)[i]] = np.hsplit(np.transpose(matrix), 1)[0][i]
transition_dict[lst_transitions[i]] = np.hsplit(np.transpose(matrix), 1)[0][i]
i += 1
return transition_dict

def compute_firing_requirement(net):
place_list=list(net.places)
place_list=sorted(list(net.places), key=lambda x: x.name)
transition_dict={}
for transition in net.transitions:
temp_array=np.zeros(len(place_list))
Expand Down Expand Up @@ -70,8 +71,11 @@ def convert_marking(net, marking, original_net=None):
:param original_net: PM4Py Petri Net object without short-circuited transition
:return: Numpy array representation
"""
marking_list=list(el.name for el in marking.keys())
place_list = list(el.name for el in net.places)
#marking_list=list(el.name for el in marking.keys())
#
marking_list = sorted([el.name for el in marking.keys()])
place_list = sorted(list(el.name for el in net.places))

mark = np.zeros(len(place_list))
for index, value in enumerate(mark):
if place_list[index] in marking_list:
Expand All @@ -87,7 +91,8 @@ def check_for_dead_tasks(net, graph):
:return: list of dead tasks
"""
tasks=[]
for transition in list(net.transitions):
lst_transitions = sorted(list(net.transitions), key=lambda x: x.name)
for transition in lst_transitions:
if transition.label != None:
tasks.append(transition)
for node,targets in graph.edges()._adjdict.items():
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ def create_network_graph(net):
:return: networkx.DiGraph(), bookkeeping dictionary
"""
graph = nx.DiGraph()
places = list(net.places)
transitions = list(net.transitions)
places = sorted(list(net.places), key=lambda x: x.name)
transitions = sorted(list(net.transitions), key=lambda x: x.name)
nodes=set(places) | set(transitions)
bookkeeping={}
for index,el in enumerate(nodes):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ def compute_incidence_matrix(net):
m = len(net.places)
C = np.zeros((m, n))
i = 0
transition_list = list(net.transitions)
place_list = list(net.places)
transition_list = sorted(list(net.transitions), key=lambda x: x.name)
place_list = sorted(list(net.places), key=lambda x: x.name)
while i < n:
t = transition_list[i]
for in_arc in t.in_arcs:
Expand Down
12 changes: 7 additions & 5 deletions pm4py/algo/analysis/woflan/place_invariants/s_component.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def compare_lists(list1, list2):
return counter

s_components = []
place_list = list(net.places)
place_list = sorted(list(net.places), key=lambda x: x.name)
for invariant in p_invariants:
i = 0
s_component = []
Expand All @@ -48,11 +48,13 @@ def compare_lists(list1, list2):
for el in s_component:
if el in net.transitions:
places_before = [arc.source for arc in el.in_arcs]
if compare_lists(s_component, places_before) != 1:
comparison_before = compare_lists(s_component, places_before)
places_after = [arc.target for arc in el.out_arcs]
comparison_after = compare_lists(s_component, places_after)
if comparison_before != 1:
is_s_component = False
break
places_after = [arc.target for arc in el.out_arcs]
if compare_lists(s_component, places_after) != 1:
if comparison_after != 1:
is_s_component = False
break
if is_s_component:
Expand All @@ -66,7 +68,7 @@ def compute_uncovered_places_in_component(s_components, net):
:param net: Petri Net representation of PM4Py
:return: List of uncovered places
"""
place_list=list(net.places)
place_list=sorted(list(net.places), key=lambda x: x.name)
for component in s_components:
for el in component:
if el in place_list:
Expand Down
48 changes: 29 additions & 19 deletions pm4py/algo/analysis/woflan/place_invariants/utility.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import numpy as np
from copy import copy
from pm4py.util.lp import solver
import pkgutil


def removearray(L, arr):
Expand Down Expand Up @@ -47,6 +48,7 @@ def transform_basis(basis, style=None):
#For uniform variants, it is necessary that the weight for a place is either 0 or 1. We collect the variants for
#which this condition does not hold. We also collect the variants for the weighted invariants the entry is <0.
to_modify = []

for vector in modified_base:
for entry in vector:
if ((entry < 0 or entry > 1) and style=='uniform') or ( entry < 0 and style=='weighted'):
Expand Down Expand Up @@ -89,18 +91,17 @@ def transform_basis(basis, style=None):
this_row = copy(zeros)
this_row[len(set_B)] = list(vector[i])[0]
for j in range(len(modified_base)):
if type(modified_base[j][i]) is np.float64:
this_row[j] = float(modified_base[j][i])
else:
this_row[j] = list(modified_base[j][i])[0]

if style == "uniform":
this_row[len(set_B) + 1 + i] = -1
Aeq.append(this_row)
beq.append(0)
elif style == "weighted":
Aub.append([-x for x in this_row])
bub.append(0)
this_row[j] = list(modified_base[j][i])[0]

if style == "uniform":
this_row[len(set_B) + 1 + i] = -1

if style == "uniform":
Aeq.append(this_row)
beq.append(0)
elif style == "weighted":
Aub.append([-x for x in this_row])
bub.append(0)
for i in range(len(vector)):
last_constraint_1 = copy(zeros)
last_constraint_1[len(set_B) + 1 + i] = 1
Expand All @@ -124,12 +125,19 @@ def transform_basis(basis, style=None):
Aub = np.zeros((1, len(c))).astype(np.float64)
bub = np.zeros(1).transpose().astype(np.float64)

# the solution provided by cvxopt seems incorrect/unstable sometimes.
# still looking for the causes. for this small linear problem,
# let's just use SCIPY as the default solver, which is stable with this
# kind of problem.
sol = solver.apply(c, Aub, bub, Aeq, beq, variant=solver.SCIPY)
points = solver.get_points_from_sol(sol, variant=solver.SCIPY)
# this is highly critical and LP solutions are not always correct :(

proposed_solver = solver.SCIPY
if pkgutil.find_loader("pulp"):
proposed_solver = solver.PULP
else:
import warnings
warnings.warn("solution from scipy may be unstable. Please install PuLP (pip install pulp) for fully reliable results.")

sol = solver.apply(c, Aub, bub, Aeq, beq, variant=proposed_solver, parameters={"method": "revised simplex", "require_ilp": True})
points = solver.get_points_from_sol(sol, variant=proposed_solver)
val = solver.get_prim_obj_from_sol(sol, variant=proposed_solver)

if points is not None:
new_vector = np.zeros(len(vector))

Expand All @@ -141,6 +149,8 @@ def transform_basis(basis, style=None):
elif style == "uniform":
for i in range(len(new_vector)):
new_vector[i] = points[len(set_B) + 1 + i]

new_vector = np.array([new_vector]).T
modified_base.append(new_vector)

return modified_base
Expand All @@ -154,7 +164,7 @@ def compute_uncovered_places(invariants, net):
:param net: Petri Net object of PM4Py
:return: List of uncovered place over all invariants
"""
place_list=list(net.places)
place_list=sorted(list(net.places), key=lambda x: x.name)
unncovered_list=place_list.copy()
for invariant in invariants:
for index, value in enumerate(invariant):
Expand Down

0 comments on commit 5fd45bd

Please sign in to comment.