Commit

Merge remote-tracking branch 'upstream/hotfixes' into release
fit-alessandro-berti committed Mar 27, 2024
2 parents df708f8 + 8b1cdd8 commit 9a6ddb0
Showing 9 changed files with 123 additions and 28 deletions.
16 changes: 12 additions & 4 deletions pm4py/algo/discovery/inductive/algorithm.py
@@ -29,6 +29,8 @@
from pm4py.objects.dfg.obj import DFG
from pm4py.objects.log.obj import EventLog
from pm4py.objects.process_tree.obj import ProcessTree
from pm4py.objects.process_tree.utils import generic as pt_util
from pm4py.objects.process_tree.utils.generic import tree_sort
from pm4py.util import constants
import warnings
from pm4py.util import exec_utils
@@ -55,13 +57,15 @@ def apply(obj: Union[EventLog, pd.DataFrame, DFG, UVCL], parameters: Optional[Di
ack = exec_utils.get_param_value(Parameters.ACTIVITY_KEY, parameters, xes_util.DEFAULT_NAME_KEY)
tk = exec_utils.get_param_value(Parameters.TIMESTAMP_KEY, parameters, xes_util.DEFAULT_TIMESTAMP_KEY)
cidk = exec_utils.get_param_value(Parameters.CASE_ID_KEY, parameters, pmutil.constants.CASE_CONCEPT_NAME)

process_tree = ProcessTree()
if type(obj) is DFG:
if variant is not Variants.IMd:
if constants.SHOW_INTERNAL_WARNINGS:
warnings.warn('Inductive Miner Variant requested for DFG artefact is not IMD, resorting back to IMD')
imd = IMD(parameters)
idfg = InductiveDFG(dfg=obj, skip=False)
return imd.apply(IMDataStructureDFG(idfg), parameters)
process_tree = imd.apply(IMDataStructureDFG(idfg), parameters)
else:
if type(obj) in [UVCL]:
uvcl = obj
@@ -70,12 +74,16 @@ def apply(obj: Union[EventLog, pd.DataFrame, DFG, UVCL], parameters: Optional[Di

if variant is Variants.IM:
im = IMUVCL(parameters)
return im.apply(IMDataStructureUVCL(uvcl), parameters)
process_tree = im.apply(IMDataStructureUVCL(uvcl), parameters)
if variant is Variants.IMf:
imf = IMFUVCL(parameters)
return imf.apply(IMDataStructureUVCL(uvcl), parameters)
process_tree = imf.apply(IMDataStructureUVCL(uvcl), parameters)
if variant is Variants.IMd:
imd = IMD(parameters)
idfg = InductiveDFG(dfg=comut.discover_dfg_uvcl(uvcl), skip=() in uvcl)
return imd.apply(IMDataStructureDFG(idfg), parameters)
process_tree = imd.apply(IMDataStructureDFG(idfg), parameters)

process_tree = pt_util.fold(process_tree)
tree_sort(process_tree)

return process_tree
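
The hunk above routes the result of every variant through a common post-processing step (pt_util.fold followed by tree_sort) instead of returning early. A minimal usage sketch of the public facade, assuming a tiny invented event log with pm4py's default XES column names:

    import pandas as pd
    import pm4py

    # Two toy traces (hypothetical data, for illustration only).
    df = pd.DataFrame({
        "case:concept:name": ["1", "1", "1", "2", "2", "2"],
        "concept:name": ["register", "decide", "pay"] * 2,
        "time:timestamp": pd.to_datetime([
            "2024-03-01 09:00", "2024-03-01 10:00", "2024-03-01 11:00",
            "2024-03-02 09:00", "2024-03-02 10:30", "2024-03-02 12:00",
        ]),
    })

    # With this change, the returned tree is folded and sorted for every variant (IM, IMf, IMd).
    tree = pm4py.discover_process_tree_inductive(df)
    print(tree)
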
@@ -53,7 +53,7 @@ def _get_candidate(cls, obj: IMDataStructureUVCL, pool, manager, parameters: Opt
enable_multiprocessing = exec_utils.get_param_value(Parameters.MULTIPROCESSING, parameters, constants.ENABLE_MULTIPROCESSING_DEFAULT)

log = obj.data_structure
candidates = comut.get_alphabet(log)
candidates = sorted(list(comut.get_alphabet(log)))
if pool is None or manager is None or not enable_multiprocessing or len(candidates) <= ActivityConcurrentUVCL.MULTI_PROCESSING_LOWER_BOUND:
for a in candidates:
cut = cls._process_candidate(a, log, parameters=parameters)
@@ -26,7 +26,7 @@ class ActivityOncePerTraceUVCL(ActivityConcurrentUVCL):

@classmethod
def _get_candidate(cls, obj: IMDataStructureUVCL, pool=None, manager=None, parameters: Optional[Dict[str, Any]] = None) -> Optional[Any]:
candidates = copy.copy(comut.get_alphabet(obj.data_structure))
candidates = sorted(list(comut.get_alphabet(obj.data_structure)))
for t in obj.data_structure:
cc = [x for x in candidates]
for candi in cc:
2 changes: 1 addition & 1 deletion pm4py/algo/discovery/inductive/fall_through/flower.py
@@ -38,7 +38,7 @@ def apply(cls, obj: IMDataStructureUVCL, pool=None, manager=None, parameters: Op
Tuple[ProcessTree, List[IMDataStructureUVCL]]]:
log = obj.data_structure
uvcl_do = UVCL()
for a in comut.get_alphabet(log):
for a in sorted(list(comut.get_alphabet(log))):
uvcl_do[(a,)] = 1
uvcl_redo = UVCL()
im_uvcl_do = IMDataStructureUVCL(uvcl_do)
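
The three hunks above replace direct iteration over the activity alphabet, which comut.get_alphabet returns as an unordered collection, with iteration over sorted(list(...)). Sorting fixes the order in which candidate activities are visited, so discovery produces the same result across runs (set iteration order is not guaranteed under Python's string-hash randomization). A minimal illustration of the pattern:

    log_alphabet = {"register", "decide", "pay"}   # a set: iteration order not guaranteed across runs
    for activity in sorted(log_alphabet):          # deterministic order: decide, pay, register
        print(activity)
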
1 change: 1 addition & 0 deletions pm4py/convert.py
@@ -286,6 +286,7 @@ def convert_log_to_ocel(log: Union[EventLog, EventStream, pd.DataFrame], activit
:param object_types: list of columns to consider as object types
:param obj_separator: separator between different objects in the same column
:param additional_event_attributes: additional attributes to be considered as event attributes in the OCEL
:param additional_object_attributes: additional attributes per object type to be considered as object attributes in the OCEL (dictionary in which object types are associated to their attributes, i.e., {"order": ["quantity", "cost"], "invoice": ["date", "due date"]})
:rtype: ``OCEL``
.. code-block:: python3
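
The new additional_object_attributes parameter lets each object type carry its own attributes in the resulting OCEL. A hedged sketch of calling the facade (the flat DataFrame and its column names are invented; only object_types and additional_object_attributes are taken from the docstring above, and the activity/timestamp columns are assumed to be picked up by the defaults):

    import pandas as pd
    import pm4py

    # Hypothetical flat log in which every event references an order.
    df = pd.DataFrame({
        "case:concept:name": ["1", "1"],
        "concept:name": ["create order", "pay order"],
        "time:timestamp": pd.to_datetime(["2024-03-01 09:00", "2024-03-01 10:00"]),
        "order": ["o1", "o1"],
        "quantity": [5, 5],
    })

    ocel = pm4py.convert_log_to_ocel(
        df,
        object_types=["order"],
        additional_object_attributes={"order": ["quantity"]},
    )
    print(ocel.objects)
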
2 changes: 1 addition & 1 deletion pm4py/meta.py
@@ -16,7 +16,7 @@
'''

__name__ = 'pm4py'
VERSION = '2.7.11.2'
VERSION = '2.7.11.3'
__version__ = VERSION
__doc__ = 'Process mining for Python'
__author__ = 'Fraunhofer Institute for Applied Information Technology FIT'
93 changes: 74 additions & 19 deletions pm4py/objects/conversion/wf_net/variants/to_process_tree.py
@@ -21,11 +21,12 @@
from enum import Enum

from pm4py.objects.petri_net.utils import petri_utils as pn_util
from pm4py.objects.petri_net.obj import PetriNet
from pm4py.objects.petri_net.obj import PetriNet, Marking
from pm4py.objects.process_tree import obj as pt_operator
from pm4py.objects.process_tree.utils import generic as pt_util
from pm4py.objects.process_tree.utils.generic import tree_sort
from pm4py.util import exec_utils
from pm4py.objects.process_tree.utils.generic import parse

TRANSITION_PREFIX = str(uuid.uuid4())

@@ -232,6 +233,51 @@ def binary_sequence_detection(net):
return None


def __group_blocks_internal(net, parameters=None):
if parameters is None:
parameters = {}

if binary_choice_detection(net) is not None:
return True
elif binary_sequence_detection(net) is not None:
return True
elif binary_concurrency_detection(net) is not None:
return True
elif binary_loop_detection(net) is not None:
return True
else:
return False


def __insert_dummy_invisibles(net, im, fm, ini_places, parameters=None):
if parameters is None:
parameters = {}

places = list(net.places)

for p in places:
if p.name in ini_places:
if p not in im and p not in fm:
source_trans = [x.source for x in p.in_arcs]
target_trans = [x.target for x in p.out_arcs]

pn_util.remove_place(net, p)
source_p = PetriNet.Place(str(uuid.uuid4()))
target_p = PetriNet.Place(str(uuid.uuid4()))
skip = PetriNet.Transition(str(uuid.uuid4()))
net.places.add(source_p)
net.places.add(target_p)
net.transitions.add(skip)

pn_util.add_arc_from_to(source_p, skip, net)
pn_util.add_arc_from_to(skip, target_p, net)

for t in source_trans:
pn_util.add_arc_from_to(t, source_p, net)
for t in target_trans:
pn_util.add_arc_from_to(target_p, t, net)


def group_blocks_in_net(net, parameters=None):
"""
Groups the blocks in the Petri net
@@ -257,18 +303,25 @@ def group_blocks_in_net(net, parameters=None):
raise ValueError('The Petri net provided is not a WF-net')

net = deepcopy(net)
ini_places = set(x.name for x in net.places)

while len(net.transitions) > 1:
if binary_choice_detection(net) is not None:
continue
elif binary_sequence_detection(net) is not None:
continue
elif binary_concurrency_detection(net) is not None:
continue
elif binary_loop_detection(net) is not None:
im = Marking({p: 1 for p in net.places if len(p.in_arcs) == 0})
fm = Marking({p: 1 for p in net.places if len(p.out_arcs) == 0})

if len(im) != 1 and len(fm) != 1:
# start/end conditions for block-structured nets
# do not hold
break

if __group_blocks_internal(net, parameters):
continue
else:
break
__insert_dummy_invisibles(net, im, fm, ini_places, parameters)
if __group_blocks_internal(net, parameters):
continue
else:
break

return net

@@ -299,14 +352,16 @@ def apply(net, im, fm, parameters=None):

grouped_net = group_blocks_in_net(net, parameters=parameters)

if len(grouped_net.transitions) == 1:
pt_str = list(grouped_net.transitions)[0].label
pt = pt_operator.ProcessTree(operator=None, label=pt_str)
ret = pt_util.fold(pt) if fold else pt
tree_sort(ret)
return ret
if debug:
from pm4py.visualization.petri_net import visualizer as pn_viz
pn_viz.view(pn_viz.apply(grouped_net, parameters={"format": "svg"}))
return grouped_net
else:
if debug:
from pm4py.visualization.petri_net import visualizer as pn_viz
pn_viz.view(pn_viz.apply(grouped_net, parameters={"format": "svg"}))
raise ValueError('Parsing of WF-net Failed')
if len(grouped_net.transitions) == 1:
pt_str = list(grouped_net.transitions)[0].label
pt = parse(pt_str)
ret = pt_util.fold(pt) if fold else pt
tree_sort(ret)
return ret
else:
raise ValueError('Parsing of WF-net Failed')
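
The reworked apply() above groups blocks in a copy of the net, inserts dummy invisible transitions (via __insert_dummy_invisibles) and retries when the start/end conditions block further reduction, and finally rebuilds the tree by parsing the label of the single remaining transition. A round-trip sketch, under the assumption that a net discovered by the inductive miner is block-structured and therefore convertible:

    import pandas as pd
    import pm4py
    from pm4py.objects.conversion.wf_net.variants import to_process_tree

    # Invented two-trace log, only used to obtain a small block-structured WF-net.
    df = pd.DataFrame({
        "case:concept:name": ["1", "1", "2", "2"],
        "concept:name": ["a", "b", "a", "c"],
        "time:timestamp": pd.to_datetime([
            "2024-03-01 09:00", "2024-03-01 10:00",
            "2024-03-02 09:00", "2024-03-02 10:00",
        ]),
    })

    net, im, fm = pm4py.discover_petri_net_inductive(df)
    tree = to_process_tree.apply(net, im, fm)   # raises ValueError if the net cannot be reduced
    print(tree)
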
4 changes: 4 additions & 0 deletions pm4py/objects/ocel/util/log_ocel.py
@@ -325,6 +325,10 @@ def log_to_ocel_multiple_obj_types(log_obj: Union[EventLog, EventStream, pd.Data
Separator between different objects in the same column
additional_event_attributes
Additional attributes to be considered as event attributes in the OCEL
additional_object_attributes
Additional attributes per object type to be considered as object attributes in the OCEL
(dictionary in which object types are associated to their attributes, i.e.,
{"order": ["quantity", "cost"], "invoice": ["date", "due date"]})
Returns
----------------
29 changes: 28 additions & 1 deletion pm4py/objects/process_tree/utils/generic.py
@@ -44,16 +44,31 @@ def fold(tree):
tree.children.clear()
del tree
tree = root
if str(reduce_tau_leafs(copy.deepcopy(tree))) != str(tree):

tree_str = str(tree)
tree = reduce_tau_leafs(tree)
tau_leafs_red_tree_str = str(tree)

if len(tau_leafs_red_tree_str) != len(tree_str):
tree = fold(tree)
tree_str = str(tree)

tree2 = _fold(tree)
tree2_str = str(tree2)

if len(tree2_str) != len(tree_str):
tree = fold(tree2)

return tree


def _fold(tree):
tree = reduce_tau_leafs(tree)

if len(tree.children) > 0:
tree.children = list(map(lambda c: _fold(c), tree.children))
tree.children = list(filter(lambda c: c is not None, tree.children))

if len(tree.children) == 0:
tree.parent = None
tree.children = None
@@ -64,6 +79,18 @@ def _fold(tree):
tree.parent = None
tree.children = None
return child

if tree.operator in [pt_op.Operator.SEQUENCE, pt_op.Operator.PARALLEL]:
i = 0
while i < len(tree.children):
child = tree.children[i]
if child.operator is None and child.label is None:
del tree.children[i]
continue
i = i + 1
if len(tree.children) == 0:
tree.operator = None

if tree.operator in [pt_op.Operator.SEQUENCE, pt_op.Operator.XOR, pt_op.Operator.PARALLEL]:
chlds = [c for c in tree.children]
for c in chlds:
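
The revised fold() above detects whether reduce_tau_leafs / _fold changed anything by comparing the lengths of the tree's string representations and keeps folding until no further reduction happens; the new block in _fold additionally removes silent (tau) leaves from sequence and parallel nodes. A small sketch of using these helpers directly (the tree expression is invented; parse is the same helper imported in the to_process_tree.py hunk above):

    from pm4py.objects.process_tree.utils import generic as pt_util

    tree = pt_util.parse("->( 'a', X( 'b', tau ), 'c' )")
    folded = pt_util.fold(tree)     # reduces redundant tau leaves / single-child nodes where possible
    pt_util.tree_sort(folded)       # canonical child ordering, as also done in algorithm.py above
    print(folded)
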
