@inproceedings{2017:benchflow:wesoa,
  title = {Lessons Learned from Evaluating Workflow Management Systems},
  booktitle = {International Workshop on Engineering Service-Oriented Applications (WESOA)},
  year = {2017},
  month = {November},
  publisher = {Springer},
  address = {Malaga, Spain},
  abstract = {Workflow Management Systems (WfMSs) today act as service composition engines and service-oriented middleware to enable the execution of automated business processes. Automation based on WfMSs promises to enable the model-driven construction of flexible and easily maintainable services with high-performance characteristics. In the past decade, significant effort has been invested into standardizing WfMSs that compose services, with standards such as the Web Services Business Process Execution Language (WS-BPEL) or the Business Process Model and Notation (BPMN). One of the aims of standardization is to enable users of WfMSs to compare different systems and to avoid vendor lock-in. Despite these efforts, many expectations concerning the portability, performance efficiency, usability, reliability, and maintainability of WfMSs are likely to remain unfulfilled. In this work, we synthesize the findings of two research initiatives that deal with WfMS conformance and performance benchmarking to distill a set of lessons learned and best practices. These findings provide useful advice for practitioners who plan to evaluate and use WfMSs and for WfMS vendors who would like to foster wider adoption of process-centric service composition middleware.},
  keywords = {BenchFlow, Lessons Learned, Workflow Management Systems},
  author = {J{\"o}rg Lenhard and Vincenzo Ferme and Simon Harrer and Matthias Geiger and Cesare Pautasso}
}

@inproceedings{2017:europlop,
  title = {A Pattern Language for Workflow Engine Conformance and Performance Benchmarking},
  booktitle = {22nd European Conference on Pattern Languages of Programs (EuroPLoP)},
  year = {2017},
  month = {July},
  publisher = {ACM},
  address = {Kloster Irsee, Germany},
  abstract = {Workflow engines are frequently used in the domains of business process management, service orchestration, and cloud computing, where they serve as middleware platforms for integrated business applications. Engines have a significant impact on the quality of service provided by hosted applications. Therefore, it is desirable to compare them and to select the most appropriate engine for a given task. To enable such a comparison, approaches for benchmarking workflow engines have emerged. Although these approaches deal with different quality attributes, i.e., performance or standard conformance, they face many recurring design and implementation problems, which have been solved in similar ways. In this paper, we present a pattern language that captures such common solutions to recurring problems (e.g., from test identification, benchmarking procedure validation, automatic engine interaction, and workflow execution observation) in the area of workflow engine conformance and performance benchmarking.
Our aim is that the pattern language presented in this paper helps future benchmark authors to benefit from our experience with the design and implementation of workflow engine benchmarks and benchmarking tools.},
  keywords = {BenchFlow, pattern language},
  doi = {10.1145/3147704.3147705},
  author = {Simon Harrer and J{\"o}rg Lenhard and Oliver Kopp and Vincenzo Ferme and Cesare Pautasso}
}

@inproceedings{benchflow:2017:icse,
  title = {Workflow Management Systems Benchmarking: Unfulfilled Expectations and Lessons Learned},
  booktitle = {39th International Conference on Software Engineering Companion (ICSE-C)},
  year = {2017},
  month = {May},
  publisher = {IEEE},
  address = {Buenos Aires, Argentina},
  abstract = {Workflow Management Systems (WfMSs) are a type of middleware that enables the execution of automated business processes. Users rely on WfMSs to construct flexible and easily maintainable software systems. Significant effort has been invested into standardising languages for business process execution, with standards such as the Web Services Business Process Execution Language 2.0 or the Business Process Model and Notation 2.0. Standardisation aims at avoiding vendor lock-in and enabling WfMS users to compare different systems. The reality is that, despite standardisation efforts, different independent research initiatives show that objectively comparing WfMSs is still challenging. As a result, WfMS users are likely to discover unfulfilled expectations while evaluating and using these systems. In this work, we discuss the findings of two research initiatives dealing with WfMS benchmarking, presenting unfulfilled expectations and lessons learned concerning the usability, reliability, and portability of WfMSs. Our goal is to provide advice for practitioners implementing or planning to use WfMSs.},
  keywords = {Lessons Learned, Workflow Management Systems},
  doi = {10.1109/ICSE-C.2017.126},
  author = {Vincenzo Ferme and J{\"o}rg Lenhard and Simon Harrer and Matthias Geiger and Cesare Pautasso}
}