@comment{BenchFlow project publications. Cleaned: @conference/@demo normalized to
@inproceedings, month macros used unquoted, page ranges use --, redundant
organization fields (duplicating publisher) removed, authors in "Last, First"
form, acronyms brace-protected in titles.}

@inproceedings{benchflow:2016:closer,
  title     = {A Container-centric Methodology for Benchmarking Workflow Management Systems},
  booktitle = {6th International Conference on Cloud Computing and Service Science (CLOSER 2016)},
  year      = {2016},
  month     = apr,
  pages     = {74--84},
  publisher = {SciTePress},
  address   = {Rome, Italy},
  abstract  = {Trusted benchmarks should provide reproducible results obtained following a transparent and well-defined process. In this paper, we show how Containers, originally developed to ease the automated deployment of Cloud application components, can be used in the context of a benchmarking methodology. The proposed methodology focuses on Workflow Management Systems (WfMSs), a critical service orchestration middleware, which can be characterized by its architectural complexity, for which Docker Containers offer a highly suitable approach. The contributions of our work are: 1) a new benchmarking approach taking full advantage of containerization technologies; and 2) the formalization of the interaction process with the WfMS vendors described clearly in a written agreement. Thus, we take advantage of emerging Cloud technologies to address technical challenges, ensuring the performance measurements can be trusted. We also make the benchmarking process transparent, automated, and repeatable so that WfMS vendors can join the benchmarking effort.},
  keywords  = {BenchFlow, benchmarking, Docker},
  doi       = {10.5220/0005908400740084},
  author    = {Ferme, Vincenzo and Ivanchikj, Ana and Pautasso, Cesare and Skouradaki, Marigianna and Leymann, Frank},
}

@comment{Was nonstandard "@demo" with no booktitle; booktitle reconstructed from
the entry key, ACM DOI, venue and date (ICPE 2016, Delft) -- verify against the
ACM DL record.}
@inproceedings{benchflow:2016:icpe,
  title     = {Integrating {Faban} with {Docker} for Performance Benchmarking},
  booktitle = {Proceedings of the 7th ACM/SPEC International Conference on Performance Engineering (ICPE 2016)},
  year      = {2016},
  month     = mar,
  pages     = {129--130},
  publisher = {ACM},
  address   = {Delft, The Netherlands},
  note      = {Demonstration paper},
  abstract  = {Reliability and repeatability are key requirements in performance benchmarking ensuring the trustworthiness of the obtained performance results. To apply a benchmark to multiple systems, the reusability of the load driver is essential. While Faban has been designed to ensure the reliability of the performance data obtained from a benchmark experiment, it lacks support for ensuring that the system under test is deployed in a known configuration. This is what Docker, a recently emerging containerization technology, excels at. In this demo paper we present how we integrated Faban with Docker as part of the BenchFlow framework to offer a complete and automated performance benchmarking framework that provides a reliable and reusable environment, ensuring the repeatability of the experiments.},
  keywords  = {BenchFlow, benchmarking, Docker, Faban},
  doi       = {10.1145/2851553.2858676},
  author    = {Ferme, Vincenzo and Pautasso, Cesare},
}

@inproceedings{benchflow:2016:caise,
  title     = {Micro-Benchmarking {BPMN} 2.0 Workflow Management Systems with Workflow Patterns},
  booktitle = {Proceedings of the 28th International Conference on Advanced Information Systems Engineering (CAISE)},
  year      = {2016},
  month     = jun,
  pages     = {67--82},
  publisher = {Springer},
  address   = {Ljubljana, Slovenia},
  abstract  = {Although Workflow Management Systems (WfMSs) are a key component in workflow technology, research work for assessing and comparing their performance is limited. This work proposes the first micro-benchmark for WfMSs that can execute BPMN 2.0 workflows. To this end, we focus on studying the performance impact of well-known workflow patterns expressed in BPMN 2.0 with respect to three open source WfMSs (i.e., Activiti, jBPM and Camunda). We executed all the experiments under a reliable environment and produced a set of meaningful metrics. This paper contributes to the area of workflow technology by defining building blocks for more complex BPMN 2.0 WfMS benchmarks. The results have shown bottlenecks on architectural design decisions, resource utilization, and limits on the load a WfMS can sustain, especially for the cases of complex and parallel structures. Experiments on a mix of workflow patterns indicated that there are no unexpected performance side effects when executing different workflow patterns concurrently, although the duration of the individual workflows that comprised the mix was increased.},
  keywords  = {BenchFlow, benchmarking, BPMN, Microbenchmark, workflow engine, Workflow Management Systems, workflow patterns},
  doi       = {10.1007/978-3-319-39696-5_5},
  url       = {http://www2.informatik.uni-stuttgart.de/cgi-bin/NCSTRL/NCSTRL_view.pl?id=INPROC-2016-05\&engl=0},
  author    = {Skouradaki, Marigianna and Ferme, Vincenzo and Pautasso, Cesare and Leymann, Frank and van Hoorn, Andr{\'e}},
}

@comment{Publisher was "IEEE" in the original record, but the DOI prefix 10.1145
identifies ACM and ICPE is an ACM/SPEC conference; corrected to ACM.}
@inproceedings{benchflow:2015:icpe,
  title     = {On the Road to Benchmarking {BPMN} 2.0 Workflow Engines},
  booktitle = {6th ACM/SPEC International Conference on Performance Engineering},
  year      = {2015},
  month     = jan,
  publisher = {ACM},
  address   = {Austin, TX, USA},
  abstract  = {Workflow Management Systems (WfMSs) provide platforms for delivering complex service-oriented applications that need to satisfy enterprise-grade quality of service requirements such as dependability and scalability. In this paper we focus on the case of benchmarking the performance of the core of WfMSs, Workflow Engines, that are compliant with the Business Process Model and Notation 2.0 (BPMN 2.0) standard. We first explore the main challenges that need to be met when designing such a benchmark and describe the approaches we designed for tackling them in the BenchFlow project. We discuss our approach to distill the essence of real-world processes to create from it processes for the benchmark, and to ensure that the benchmark finds wide applicability.},
  keywords  = {BenchFlow, benchmarking},
  doi       = {10.1145/2668930.2695527},
  author    = {Skouradaki, Marigianna and Ferme, Vincenzo and Leymann, Frank and Pautasso, Cesare and Roller, Dieter},
}

@inproceedings{benchflow:2015:btw,
  title     = {Towards Workflow Benchmarking: Open Research Challenges},
  booktitle = {16. Fachtagung Datenbanksysteme f{\"u}r Business, Technologie und Web (BTW)},
  year      = {2015},
  month     = mar,
  pages     = {331--350},
  publisher = {Gesellschaft f{\"u}r Informatik},
  address   = {Hamburg, Germany},
  keywords  = {BenchFlow, benchmarking, workflow},
  url       = {http://www.btw-2015.de/?programm_main},
  author    = {Pautasso, Cesare and Roller, Dieter and Leymann, Frank and Ferme, Vincenzo and Skouradaki, Marigianna},
}

@inproceedings{benchflow:2014:sosp,
  title     = {Technical Open Challenges on Benchmarking Workflow Management Systems},
  booktitle = {Symposium on Software Performance},
  year      = {2014},
  month     = nov,
  pages     = {105--112},
  address   = {Stuttgart, Germany},
  abstract  = {The goal of the BenchFlow project is to design the first benchmark for assessing and comparing the performance of BPMN 2.0 Workflow Management Systems (WfMSs). WfMSs have become the platform to build composite service-oriented applications, whose performance depends on two factors: the performance of the workflow system itself and the performance of the composed services (which could lie outside of the control of the workflow). Our main goal is to present to the community the state of our work, and the open challenges of a complex industry-relevant benchmark},
  keywords  = {BenchFlow, benchmarking, workflow engine},
  url       = {http://www.performance-symposium.org/2014/proceedings/},
  author    = {Skouradaki, Marigianna and Roller, Dieter and Leymann, Frank and Ferme, Vincenzo and Pautasso, Cesare},
}