@inproceedings{8053,
  author       = {Luckey, Markus and Gerth, Christian and Soltenborn, Christian and Engels, Gregor},
  booktitle    = {Proceedings of the 8th International Conference on Autonomic Computing (ICAC'11)},
  publisher    = {ACM},
  title        = {{QUAASY} - {QUality} Assurance of Adaptive {SYstems}},
  doi          = {10.1145/1998582.1998617},
  year         = {2011},
}

@inproceedings{8054,
  author       = {Bandener, Nils and Soltenborn, Christian and Engels, Gregor},
  booktitle    = {Proceedings of the 3rd International Conference on Software Language Engineering (SLE 2010)},
  pages        = {357--376},
  publisher    = {Springer},
  title        = {Extending {DMM} Behavior Specifications for Visual Execution and Debugging},
  doi          = {10.1007/978-3-642-19440-5_24},
  volume       = {6563},
  year         = {2011},
}

@techreport{8221,
  author       = {Brüseke, Frank and Christ, Fabian and Sauer, Stefan and Wübbeke, Andreas},
  institution  = {University of Paderborn, Software Quality Lab (s-lab)},
  title        = {Testen von {Software-Produktlinien}},
  year         = {2011},
}

@incollection{6293,
  author       = {Nebe, Karsten and Klompmaker, Florian and Jung, Helge and Fischer, Holger Gerhard},
  booktitle    = {Human-Computer Interaction. Interaction Techniques and Environments},
  editor       = {Jacko, Julie Anne},
  isbn         = {9783642216046},
  issn         = {0302-9743},
  location     = {Orlando, USA},
  pages        = {100--109},
  publisher    = {Springer Berlin Heidelberg},
  title        = {Exploiting New Interaction Techniques for Disaster Control Management Using {Multitouch}-, {Tangible}- and {Pen-Based}-Interaction},
  doi          = {10.1007/978-3-642-21605-3_11},
  volume       = {6762},
  year         = {2011},
}

@inproceedings{6298,
  author       = {Nebe, Karsten and Fischer, Holger Gerhard and Klompmaker, Florian and Jung, Helge},
  booktitle    = {Mensch \& Computer 2011, 11. fachübergreifende Konferenz für interaktive und kooperative Medien, überMEDIEN - ÜBERmorgen},
  pages        = {263--273},
  publisher    = {Oldenbourg Wissenschaftsverlag},
  title        = {{Multitouch-, Be-Greifbare- und Stiftbasierte-Interaktion in der Einsatzlageplanung}},
  year         = {2011},
}

@inproceedings{6299,
  author       = {Jung, Helge and Nebe, Karsten and Klompmaker, Florian and Fischer, Holger Gerhard},
  booktitle    = {Mensch \& Computer 2011, 11. fachübergreifende Konferenz für interaktive und kooperative Medien, überMEDIEN - ÜBERmorgen},
  pages        = {305--308},
  publisher    = {Oldenbourg Wissenschaftsverlag},
  title        = {{Authentifizierte Eingaben auf Multitouch-Tischen}},
  year         = {2011},
}

@incollection{6300,
  author       = {Fischer, Holger Gerhard and Nebe, Karsten and Klompmaker, Florian},
  booktitle    = {Human Centered Design},
  editor       = {Kurosu, Masaaki},
  isbn         = {9783642217524},
  issn         = {0302-9743},
  location     = {Orlando, USA},
  pages        = {28--37},
  publisher    = {Springer Berlin Heidelberg},
  title        = {A Holistic Model for Integrating {Usability} Engineering and Software Engineering Enriched with Marketing Activities},
  doi          = {10.1007/978-3-642-21753-1_4},
  volume       = {6776},
  year         = {2011},
}

@inproceedings{6301,
  author       = {Fischer, Holger Gerhard and Bogner, Christian and Geis, Thomas and Polkehn, Knut and Zimmermann, Dirk},
  booktitle    = {Jahresband Usability Professionals},
  pages        = {72--74},
  publisher    = {German UPA},
  title        = {{Der Qualitätsstandard für Usability Engineering der German UPA – Aktueller Stand der Arbeiten}},
  year         = {2011},
}

@article{7353,
  abstract     = {Manuelle Testerstellung verursacht hohe Kosten. Im Vergleich dazu bietet modellbasiertes Testen große Vorteile hinsichtlich Testautomatisierung, früher Fehlerfindung, Erhöhung der Testabdeckung, effizienten Testentwurfs und besserer Rückverfolgbarkeit. Die Einführung des modellbasierten Testens ist jedoch mit Investitionen verbunden, für die die Rendite häufig unklar erscheint. Dabei finden sich in der Literatur bereits etliche Erfahrungsberichte zur erfolgreichen Einführung von modellbasiertem Testen in unterschiedlichen Anwendungsdomänen. In diesem Artikel präsentieren wir einen Überblick über einige dieser Erfahrungsberichte.},
  author       = {Weißleder, Stephan and Güldali, Baris and Mlynarski, Michael and Törsel, Arne-Michael and Faragó, David and Prester, Florian and Winter, Mario},
  journal      = {OBJEKTspektrum},
  number       = {6},
  pages        = {59--65},
  title        = {{Modellbasiertes Testen: Hype oder Realität?}},
  year         = {2011},
}

@article{7354,
  abstract     = {Acceptance testing is a time-consuming task for complex software systems that have to fulfill a large number of requirements. To reduce this effort, we have developed a widely automated method for deriving test plans from requirements that are expressed in natural language. It consists of three stages: annotation, clustering, and test plan specification. The general idea is to exploit redundancies and implicit relationships in requirements specifications. Multi-viewpoint techniques based on RM-ODP (Reference Model for Open Distributed Processing) are employed for specifying the requirements. We then use linguistic analysis techniques, requirements clustering algorithms, and pattern-based requirements collection to reduce the total effort of testing against the requirements specification. In particular, we use linguistic analysis for extracting and annotating the actor, process and object of a requirements statement. During clustering, a similarity function is computed as a measure for the overlap of requirements. In the test plan specification stage, our approach provides capabilities for semi-automatically deriving test plans and acceptance criteria from the clustered informal textual requirements. Two patterns are applied to compute a suitable order of test activities. The generated test plans consist of a sequence of test steps and asserts that are executed or checked in the given order. We also present the supporting prototype tool TORC, which is available open source. For the evaluation of the approach, we have conducted a case study in the field of acceptance testing of a national electronic identification system. In summary, we report on lessons learned how linguistic analysis and clustering techniques can help testers in understanding the relations between requirements and for improving test planning.},
  author       = {Güldali, Baris and Funke, Holger and Sauer, Stefan and Engels, Gregor},
  issn         = {0963-9314},
  journal      = {Software Quality Journal},
  number       = {4},
  pages        = {771--799},
  publisher    = {Springer Nature},
  title        = {{TORC}: test plan optimization by requirements clustering},
  doi          = {10.1007/s11219-011-9149-4},
  volume       = {19},
  year         = {2011},
}

@article{7355,
  abstract     = {Dynamic Meta Modeling (DMM) is a visual semantics specification technique targeted at languages based on a metamodel. A DMM specification consists of a runtime metamodel and operational rules which describe how instances of the runtime metamodel change over time. A known deficiency of the DMM approach is that it does not support the refinement of a DMM specification, e.g., in the case of defining the semantics for a refined and extended domain-specific language (DSL). Up to now, DMM specifications could only be reused by adding or removing DMM rules. In this paper, we enhance DMM such that DMM rules can override other DMM rules, similar to a method being overridden in a subclass, and we show how rule overriding can be realized with the graph transformation tool GROOVE. We argue that rule overriding does not only have positive impact on reusability, but also improves the intuitive understandability of DMM semantics specifications.},
  author       = {Soltenborn, Christian and Engels, Gregor},
  issn         = {1045-926X},
  journal      = {Journal of Visual Languages \& Computing},
  number       = {3},
  pages        = {233--250},
  publisher    = {Elsevier BV},
  title        = {Using rule overriding to improve reusability and understandability of {Dynamic} {Meta} {Modeling} specifications},
  doi          = {10.1016/j.jvlc.2010.12.005},
  volume       = {22},
  year         = {2011},
}

@inproceedings{7535,
  author       = {Böckelmann, Irina and Schenk, Daniel and Rößler, Thoralf and Adler, Simon and Senft, Björn and Grubert, Jens and Mecke, Rüdiger and Huckauf, Anke and Urbina, Mario and Tümler, Johannes and Darius, Sabine},
  title        = {{Physiologische Beanspruchungsreaktionen bei der Anwendung von kopfgetragenen AR-Displays}},
  internal-note = {NOTE(review): @inproceedings requires a booktitle (proceedings/venue name) — supply it; could not be determined from this file},
  year         = {2011},
}

@inproceedings{7536,
  author       = {Klompmaker, Florian and Senft, Björn and Nebe, Karsten and Busch, Clemens and Willemsen, Detlev},
  booktitle    = {{HEALTHINF} 2011 - Proceedings of the International Conference on Health Informatics, Rome, Italy, 26-29 January, 2011},
  pages        = {268--273},
  title        = {User Centered Design Process of {OSAMi-D} - Developing User Interfaces for a Remote Ergometer Training Application},
  year         = {2011},
}

@inproceedings{8471,
  abstract     = {Performance is an important quality attribute for business information systems. When a tester has spotted a performance error, the error is passed to the software developers to fix it. However, in component-based software development the tester has to do blame analysis first, i. e. the tester has to decide, which party is responsible to fix the error. If the error is a design or deployment issue, it can be assigned to the software architect or the system deployer. If the error is specific to a component, it needs to be assigned to the corresponding component developer. An accurate blame analysis is important, because wrong assignments of errors will cause a loss of time and money. Our approach aims at doing blame analysis for performance errors by comparing performance metrics obtained in performance testing and performance prediction. We use performance prediction values as expected values for individual components. For performance prediction we use the Palladio approach. By this means, our approach evaluates each component’s performance in a certain test case. If the component performs poorly, its component developer needs to fix the component or the architect replaces the component with a faster one. If no component performs poorly, we can deduce that there is a design or deployment issue and the architecture needs to be changed. In this paper, we present an exemplary blame analysis based on a web shop system. The example shows the feasibility of our approach.},
  author       = {Brüseke, Frank and Becker, Steffen and Engels, Gregor},
  booktitle    = {Proceedings of the 16th International Workshop on Component-Oriented Programming (WCOP; satellite event of the CompArch 2011), Boulder Colorado, CO (USA)},
  pages        = {25--32},
  publisher    = {ACM},
  title        = {{Palladio}-based performance blame analysis},
  doi          = {10.1145/2000292.2000298},
  year         = {2011},
}

@inproceedings{8472,
  author       = {Nagel, Benjamin},
  booktitle    = {Proceedings of the Software Engineering 2011 (SE 2011), Karlsruhe (Germany)},
  publisher    = {Gesellschaft für Informatik (GI)},
  title        = {{Semi-automatische Ableitung externer Anpassungsmechanismen für selbst-adaptive Systeme}},
  year         = {2011},
}

@inproceedings{652,
  abstract     = {In the development process of service-oriented systems, business process models are used at different levels. Typically, high-level business process models that describe business requirements and needs are stepwise refined to the IT level by different business modelers and software architects. As a result, different process model versions must be compared and merged by means of model version control. An important prerequisite for process model version control is an elaborated matching approach that results in precise mappings between different process model versions. The challenge of such an approach is to deal with syntactically different process models that are semantically equivalent. For that purpose, matching techniques must consider the semantics of process modeling languages. In this paper, we present a matching approach for process models in a versioning scenario. Based on a term formalization of process models, we enable an efficient and effective way to match syntactically different but semantically equivalent process models resulting in precise mappings.},
  author       = {Gerth, Christian and Luckey, Markus and Küster, Jochen and Engels, Gregor},
  booktitle    = {Proceedings of the IEEE 8th International Conference on Service Computing (SCC)},
  pages        = {218--225},
  title        = {Precise Mappings between Business Process Models in Versioning Scenarios},
  doi          = {10.1109/SCC.2011.65},
  year         = {2011},
}

@misc{661,
  author       = {Arifulina, Svetlana},
  publisher    = {Universität Paderborn},
  title        = {Coverage Criteria for Testing {DMM} Specifications},
  year         = {2011},
}

@inproceedings{6740,
  author       = {Böttcher, Stefan and Bokermann, Dennis and Hartel, Rita},
  booktitle    = {Advances in Databases - 28th British National Conference on Databases, BNCOD 28, Revised Selected Papers},
  pages        = {209--220},
  publisher    = {Springer Berlin Heidelberg},
  title        = {Computing Compressed {XML} Data from Relational Databases},
  volume       = {7051},
  year         = {2011},
}

@proceedings{7770,
  editor       = {Engels, Gregor and Lewerentz, Claus and Schäfer, Wilhelm and Schürr, Andy and Westfechtel, Bernhard},
  publisher    = {Springer},
  title        = {Graph Transformations and Model-Driven Engineering - Essays Dedicated to {Manfred Nagl} on the Occasion of his 65th Birthday},
  doi          = {10.1007/978-3-642-17322-6},
  volume       = {5765},
  year         = {2010},
}

@proceedings{7771,
  editor       = {Engels, Gregor and Luckey, Markus and Pretschner, Alexander and Reussner, Ralf H.},
  publisher    = {Gesellschaft für Informatik (GI)},
  title        = {{Software Engineering 2010 - Workshop Proceedings (inkl. Doktoranden Symposium), Paderborn (Germany)}},
  volume       = {P-160},
  year         = {2010},
}

