@inproceedings{2017:benchflow:bpmds,
  title     = {Performance Comparison Between {BPMN} 2.0 Workflow Management Systems Versions},
  booktitle = {18th Working Conference on Business Process Modeling, Development and Support ({BPMDS} 2017)},
  year      = {2017},
  month     = jun,
  publisher = {Springer},
  address   = {Essen, Germany},
  abstract  = {Software has become a rapidly evolving artifact and Workflow Management Systems (WfMSs) are not an exception. WfMSs{\textquoteright} changes may impact key performance indicators or resource consumption levels may change among different versions. Thus, users considering a WfMS upgrade need to evaluate the extent of such changes for frequently issued workload. Deriving such information requires running performance experiments with appropriate workloads. In this paper, we propose a novel method for deriving a structurally representative workload from a given business process collection, which we later use to evaluate the performance and resource consumption over four versions of two open-source WfMSs, for different numbers of simulated users. In our case study scenario the results reveal relevant variations in the WfMSs{\textquoteright} performance and resource consumption, indicating a decrease in performance for newer versions.},
  keywords  = {BenchFlow, BPMN, Performance Regression, Performance Testing, workflow engine, Workflow Management Systems},
  author    = {Ferme, Vincenzo and Skouradaki, Marigianna and Ivanchikj, Ana and Pautasso, Cesare and Leymann, Frank}
}
@inproceedings{benchflow:2017:bpm,
  title     = {On the Performance Overhead of {BPMN} Modeling Practices},
  booktitle = {15th International Conference on Business Process Management ({BPM2017})},
  year      = {2017},
  month     = sep,
  pages     = {216--232},
  publisher = {Springer},
  address   = {Barcelona, Spain},
  abstract  = {Business process models can serve different purposes, from discussion and analysis among stakeholders, to simulation and execution. 
While work has been done on deriving modeling guidelines to improve understandability, it remains to be determined how different modeling practices impact the execution of the models. In this paper we observe how semantically equivalent, but syntactically different, models behave in order to assess the performance impact of different modeling practices. To do so, we propose a methodology for systematically deriving semantically equivalent models by applying a set of model transformation rules and for precisely measuring their execution performance. We apply the methodology on three scenarios to systematically explore the performance variability of 16 different versions of parallel, exclusive, and inclusive control flows. Our experiments with two open-source business process management systems measure the execution duration of each model{\textquoteright}s instances. The results reveal statistically different execution performance when applying different modeling practices without total ordering of performance ranks.},
  keywords = {BenchFlow, BPMN, performance},
  doi      = {10.1007/978-3-319-65000-5_13},
  author   = {Ivanchikj, Ana and Ferme, Vincenzo and Pautasso, Cesare}
}
@inproceedings{benchflow:2016:closer,
  title     = {A Container-centric Methodology for Benchmarking Workflow Management Systems},
  booktitle = {6th International Conference on Cloud Computing and Service Science ({CLOSER} 2016)},
  year      = {2016},
  month     = apr,
  pages     = {74--84},
  publisher = {SciTePress},
  address   = {Rome, Italy},
  abstract  = {Trusted benchmarks should provide reproducible results obtained following a transparent and well-defined process. In this paper, we show how Containers, originally developed to ease the automated deployment of Cloud application components, can be used in the context of a benchmarking methodology. 
The proposed methodology focuses on Workflow Management Systems (WfMSs), a critical service orchestration middleware, which can be characterized by its architectural complexity, for which Docker Containers offer a highly suitable approach. The contributions of our work are: 1) a new benchmarking approach taking full advantage of containerization technologies; and 2) the formalization of the interaction process with the WfMS vendors described clearly in a written agreement. Thus, we take advantage of emerging Cloud technologies to address technical challenges, ensuring the performance measurements can be trusted. We also make the benchmarking process transparent, automated, and repeatable so that WfMS vendors can join the benchmarking effort.},
  keywords = {BenchFlow, benchmarking, Docker},
  doi      = {10.5220/0005908400740084},
  author   = {Ferme, Vincenzo and Ivanchikj, Ana and Pautasso, Cesare and Skouradaki, Marigianna and Leymann, Frank}
}
@inproceedings{benchflow:2016:bpm,
  title     = {Estimating the Cost for Executing Business Processes in the {Cloud}},
  booktitle = {{BPM} Forum},
  year      = {2016},
  month     = sep,
  pages     = {72--88},
  publisher = {Springer},
  address   = {Rio de Janeiro, Brazil},
  abstract  = {Managing and running business processes in the Cloud changes how Workflow Management Systems (WfMSs) are deployed. Consequently, when designing such WfMSs, there is a need of determining the sweet spot in the performance vs. resource consumption trade-off. While all Cloud providers agree on the pay-as-you-go resource consumption model, every provider uses a different cost model to gain a competitive edge. In this paper, we present a novel method for estimating the infrastructure costs of running business processes in the Cloud. The method is based on the precise measurement of the resources required to run a mix of business process in the Cloud, while accomplishing expected performance requirements. 
To showcase the method we use the BenchFlow framework to run experiments on a widely used open-source WfMS executing custom workload with a varying number of simulated users. The experiments are necessary to reliably measure WfMS{\textquoteright}s performance and resource consumption, which is then used to estimate the infrastructure costs of executing such workload on four different Cloud providers.},
  keywords = {BenchFlow, cloud computing, cloud workflows},
  doi      = {10.1007/978-3-319-45468-9_5},
  author   = {Ferme, Vincenzo and Ivanchikj, Ana and Pautasso, Cesare}
}
@inproceedings{benchflow:2015:bpmeter,
  title     = {{BPMeter}: Web Service and Application for Static Analysis of {BPMN} 2.0 Collections},
  booktitle = {Proceedings of the {BPM} Demo Session 2015},
  year      = {2015},
  month     = aug,
  pages     = {30--34},
  publisher = {Springer},
  address   = {Innsbruck, Austria},
  abstract  = {The number of business process models is constantly increasing as companies realize the competitive advantage of managing their processes. Measuring their size and structural properties can give useful insights. With the BPMeter tool, process owners can quickly compare their process with company{\textquoteright}s process portfolio, researchers can statically analyze a process to see which modeling language features have been used in practice, while modelers can obtain an aggregated view over their processes. In this demonstration we show how to use BPMeter, which provides a simple Web application to visualize the results of applying over 100 different size and structure metrics to BPMN 2.0 process models. The visualization features measurements, statistics and the possibility to compare the measurements with the ones obtained from the entire portfolio. 
Moreover we show how to invoke its RESTful Web API so that the BPMeter analyzer can be easily integrated with existing process management tools.},
  keywords = {BenchFlow, BPMN, Workflow Static Analysis},
  author   = {Ivanchikj, Ana and Ferme, Vincenzo and Pautasso, Cesare}
}
@inproceedings{benchflow:2015:bpm,
  title     = {A Framework for Benchmarking {BPMN} 2.0 Workflow Management Systems},
  booktitle = {13th International Conference on Business Process Management ({BPM} 2015)},
  year      = {2015},
  month     = aug,
  publisher = {Springer},
  address   = {Innsbruck, Austria},
  keywords  = {BenchFlow, BPMN, Workflow Benchmarking},
  doi       = {10.1007/978-3-319-23063-4_18},
  author    = {Ferme, Vincenzo and Ivanchikj, Ana and Pautasso, Cesare}
}