@inproceedings{8059,
  author       = {Engels, Gregor},
  booktitle    = {Software Engineering 2013},
  pages        = {17--18},
  title        = {{On-the-Fly Computing - Das Entwicklungs- und Betriebsparadigma für Softwaresysteme der Zukunft}},
  volume       = {P-213},
  year         = {2013},
}

@inproceedings{8060,
  author       = {Huma, Zille and Gerth, Christian and Engels, Gregor and Juwig, Oliver},
  booktitle    = {Proceedings of the 11th International Conference on Service Oriented Computing ({ICSOC}'13)},
  pages        = {524--532},
  publisher    = {Springer-Verlag},
  title        = {Automated Service Composition for {On-the-Fly} {SOAs}},
  volume       = {8274},
  year         = {2013},
}

@inproceedings{8061,
  author       = {Luckey, Markus and Engels, Gregor},
  booktitle    = {Proceedings of the 8th International Symposium on Software Engineering for Adaptive and Self-Managing Systems},
  pages        = {143--152},
  publisher    = {ACM},
  title        = {High-Quality Specification of Self-Adaptive Software Systems},
  year         = {2013},
}

@inproceedings{8062,
  author       = {Brüseke, Frank and Becker, Steffen and Engels, Gregor},
  booktitle    = {Proceedings of the 4th {ACM/SPEC} International Conference on Performance Engineering ({ICPE} 2013), Prague (Czech Republic)},
  pages        = {77--88},
  publisher    = {ACM},
  address      = {New York, NY, USA},
  title        = {Decision Support via Automated Metric Comparison for the {Palladio-based} Performance Blame Analysis},
  year         = {2013},
}

@inproceedings{8063,
  author       = {Becker, Matthias and Luckey, Markus and Becker, Steffen},
  booktitle    = {Ninth International {ACM} {SIGSOFT} Conference on the Quality of Software Architectures},
  pages        = {43--52},
  publisher    = {ACM},
  address      = {New York, NY, USA},
  title        = {Performance Analysis of Self-Adaptive Systems for Requirements Validation at Design-Time},
  year         = {2013},
}

@techreport{8222,
  author       = {Küster, Jochen and Kovács, Dániel and Bauer, Eduard and Gerth, Christian},
  institution  = {IBM Research},
  title        = {Integrating Coverage Analysis into Test-driven Development of Model Transformations},
  year         = {2013},
}

@techreport{8223,
  author       = {Huma, Zille and Gerth, Christian and Engels, Gregor},
  institution  = {University of Paderborn, Germany},
  title        = {Automated Service Discovery and Composition for {On-the-Fly} {SOAs}},
  year         = {2013},
}

@inproceedings{5752,
  author       = {Yigitbas, Enes and Gerth, Christian and Sauer, Stefan},
  booktitle    = {Informatik 2013, 43. Jahrestagung der Gesellschaft für Informatik e.V. (GI), Informatik angepasst an Mensch, Organisation und Umwelt, 16.-20. September 2013, Koblenz, Deutschland},
  pages        = {2714--2723},
  title        = {{Konzeption modellbasierter Benutzungsschnittstellen für verteilte Selbstbedienungssysteme}},
  year         = {2013},
}

@inbook{6276,
  author       = {Klompmaker, Florian and Paelke, Volker and Fischer, Holger Gerhard},
  booktitle    = {Distributed, Ambient, and Pervasive Interactions},
  isbn         = {9783642393501},
  issn         = {0302-9743},
  location     = {Las Vegas, USA},
  pages        = {32--41},
  publisher    = {Springer Berlin Heidelberg},
  title        = {A Taxonomy-Based Approach towards {NUI} Interaction Design},
  doi          = {10.1007/978-3-642-39351-8_4},
  volume       = {8028},
  year         = {2013},
}

@inbook{6279,
  author       = {Fischer, Holger Gerhard and Strenge, Benjamin and Nebe, Karsten},
  booktitle    = {Design, User Experience, and Usability. Design Philosophy, Methods, and Tools},
  isbn         = {9783642392283},
  issn         = {0302-9743},
  location     = {Las Vegas, USA},
  pages        = {252--261},
  publisher    = {Springer Berlin Heidelberg},
  title        = {Towards a Holistic Tool for the Selection and Validation of Usability Method Sets Supporting Human-Centered Design},
  doi          = {10.1007/978-3-642-39229-0_28},
  volume       = {8012},
  year         = {2013},
}

@inproceedings{6284,
  author       = {Fischer, Holger Gerhard and Geis, Thomas and Molich, Rolf and Kluge, Oliver and Heimgärtner, Rüdiger and Hunkirchen, Peter},
  booktitle    = {Jahresband Usability Professionals},
  pages        = {28--34},
  publisher    = {German UPA},
  title        = {{Do You Speak Usability? - Aktueller Stand des Glossars und des Curriculums für den Certified Professional for Usability and User Experience (CPUX) der German UPA}},
  year         = {2013},
}

@inproceedings{469,
  abstract     = {Runtime monitoring aims at ensuring program safety by monitoring the program's behaviour during execution and taking appropriate action before a program violates some property. Runtime monitoring is in particular important when an exhaustive formal verification fails. While the approach allows for a safe execution of programs, it may impose a significant runtime overhead. In this paper, we propose a novel technique combining verification and monitoring which incurs no overhead during runtime at all. The technique proceeds by using the inconclusive result of a verification run as the basis for transforming the program into one where all potential points of failure are replaced by HALT statements. The new program is safe by construction, behaviourally equivalent to the original program (except for unsafe behaviour), and has the same performance characteristics.},
  author       = {Wonisch, Daniel and Schremmer, Alexander and Wehrheim, Heike},
  booktitle    = {Proceedings of the 11th International Conference on Software Engineering and Formal Methods ({SEFM})},
  pages        = {244--258},
  title        = {Zero Overhead Runtime Monitoring},
  doi          = {10.1007/978-3-642-40561-7_17},
  year         = {2013},
}

@inproceedings{470,
  abstract     = {In OpenFlow [1], multiple switches share the same control plane which is centralized at what is called the OpenFlow controller. A switch only consists of a forwarding plane. Rules for forwarding individual packets (called flow entries in OpenFlow) are pushed from the controller to the switches. In a network with a high arrival rate of new flows, such as in a data center, the control traffic between the switch and controller can become very high. As a consequence, routing of new flows will be slow. One way to reduce control traffic is to use wildcarded flow entries. Wildcard flow entries can be used to create default routes in the network. However, since switches do not keep track of flows covered by a wildcard flow entry, the controller no longer has knowledge about individual flows. To find out about these individual flows we propose an extension to the current OpenFlow standard to enable packet sampling of wildcard flow entries.},
  author       = {Wette, Philip and Karl, Holger},
  booktitle    = {Proceedings of the {ACM} {SIGCOMM} '13},
  pages        = {541--542},
  title        = {Which Flows Are Hiding Behind My Wildcard Rule? Adding Packet Sampling to {OpenFlow}},
  doi          = {10.1145/2486001.2491710},
  year         = {2013},
}

@misc{471,
  author       = {Tezer, Alina},
  howpublished = {Universität Paderborn},
  title        = {{Verteilte Erstellung und Aktualisierung von Schlüsselservern in identitätsbasierten Verschlüsselungssystemen}},
  year         = {2013},
}

@article{476,
  abstract     = {An elementary $h$-route flow, for an integer $h \ge 1$, is a set of $h$ edge-disjoint paths between a source and a sink, each path carrying a unit of flow, and an $h$-route flow is a non-negative linear combination of elementary $h$-route flows. An $h$-route cut is a set of edges whose removal decreases the maximum $h$-route flow between a given source-sink pair (or between every source-sink pair in the multicommodity setting) to zero. The main result of this paper is an approximate duality theorem for multicommodity $h$-route cuts and flows, for $h \le 3$: The size of a minimum $h$-route cut is at least $f/h$ and at most $O(\log^4 k \cdot f)$ where $f$ is the size of the maximum $h$-route flow and $k$ is the number of commodities. The main step towards the proof of this duality is the design and analysis of a polynomial-time approximation algorithm for the minimum $h$-route cut problem for $h = 3$ that has an approximation ratio of $O(\log^4 k)$. Previously, polylogarithmic approximation was known only for $h$-route cuts for $h \le 2$. A key ingredient of our algorithm is a novel rounding technique that we call multilevel ball-growing. Though the proof of the duality relies on this algorithm, it is not a straightforward corollary of it as in the case of classical multicommodity flows and cuts. Similar results are shown also for the sparsest multiroute cut problem.},
  author       = {Kolman, Petr and Scheideler, Christian},
  journal      = {Theory of Computing Systems},
  number       = {2},
  pages        = {341--363},
  publisher    = {Springer},
  title        = {Towards Duality of Multicommodity Multiroute Cuts and Flows: Multilevel Ball-Growing},
  doi          = {10.1007/s00224-013-9454-3},
  year         = {2013},
}

@inproceedings{477,
  abstract     = {We consider the k-token dissemination problem, where k initially arbitrarily distributed tokens have to be disseminated to all nodes in a dynamic network (as introduced by Kuhn et al., STOC 2010). In contrast to general dynamic networks, our dynamic networks are unit disk graphs, i.e., nodes are embedded into the Euclidean plane and two nodes are connected if and only if their distance is at most R. Our worst-case adversary is allowed to move the nodes on the plane, but the maximum velocity v_max of each node is limited and the graph must be connected in each round. For this model, we provide almost tight lower and upper bounds for k-token dissemination if nodes are restricted to send only one token per round. It turns out that the maximum velocity v_max is a meaningful parameter to characterize dynamics in our model.},
  author       = {Abshoff, Sebastian and Benter, Markus and Cord-Landwehr, Andreas and Malatyali, Manuel and Meyer auf der Heide, Friedhelm},
  booktitle    = {Algorithms for Sensor Systems - 9th International Symposium on Algorithms and Experiments for Sensor Systems, Wireless Networks and Distributed Robotics, {ALGOSENSORS} 2013, Sophia Antipolis, France, September 5-6, 2013, Revised Selected Papers},
  pages        = {22--34},
  title        = {Token Dissemination in Geometric Dynamic Networks},
  doi          = {10.1007/978-3-642-45346-5_3},
  year         = {2013},
}

@phdthesis{478,
  abstract     = {Software systems are playing an increasing role in our everyday life, and as the amount of software applications grows, so does their complexity and the relevance of their computations. Software components can be found in many systems that are charged with safety-critical tasks, such as control systems for aviation or power plants. Hence, software verification techniques that are capable of proving the absence of critical errors are becoming more and more important in the field of software engineering. A well-established approach to software verification is model checking. Applying this technique involves an exhaustive exploration of a state space model corresponding to the system under consideration. The major challenge in model checking is the so-called state explosion problem: The state space of a software system grows exponentially with its size. Thus, the straightforward modelling of real-life systems is practically impossible. A common approach to this problem is the application of abstraction techniques, which reduce the original state space by mapping it on a significantly smaller abstract one. Abstraction inherently involves a loss of information, and thus, the resulting abstract model may be too imprecise for a definite result in verification. Therefore, abstraction is typically combined with abstraction refinement: An initially very coarse abstract model is iteratively refined, i.e. enriched with new details about the original system, until a level of abstraction is reached that is precise enough for a definite outcome. Abstraction refinement-based model checking is fully automatable and it is considered as one of the most promising approaches to the state explosion problem in verification. However, it is still faced with a number of challenges. There exist several types of abstraction techniques and not every type is equally well-suited for all kinds of systems and verification tasks. Moreover, the selection of adequate refinement steps is nontrivial and typically the most crucial part of the overall approach: Unfavourable refinement decisions can compromise the state space-reducing effect of abstraction, and as a consequence, can easily lead to the failure of verification. It is, however, hard to predict which refinement steps will eventually be expedient for verification – and which not.},
  author       = {Timm, Nils},
  school       = {Universität Paderborn},
  title        = {Three-Valued Abstraction and Heuristic-Guided Refinement for Verifying Concurrent Systems},
  year         = {2013},
}

@inproceedings{481,
  abstract     = {Cloud computing offers high availability, dynamic scalability, and elasticity requiring only very little administration. However, this service comes with financial costs. Peer-to-peer systems, in contrast, operate at very low costs but cannot match the quality of service of the cloud. This paper focuses on the case study of Wikipedia and presents an approach to reduce the operational costs of hosting similar websites in the cloud by using a practical peer-to-peer approach. The visitors of the site are joining a Chord overlay, which acts as first cache for article lookups. Simulation results show that up to 72% of the article lookups in Wikipedia could be answered by other visitors instead of using the cloud.},
  author       = {Graffi, Kalman and Bremer, Lars},
  booktitle    = {Proceedings of the International Conference on Communications ({ICC}'13)},
  pages        = {3444--3449},
  title        = {Symbiotic Coupling of {P2P} and Cloud Systems: The {Wikipedia} Case},
  doi          = {10.1109/ICC.2013.6655082},
  year         = {2013},
}

@inproceedings{484,
  abstract     = {One of the main ideas of Service-Oriented Computing (SOC) is the delivery of flexibly composable services provided on world-wide markets. For a successful service discovery, service requests have to be matched with the available service offers. However, in a situation in which no service that completely matches the request can be discovered, the customer may tolerate slight discrepancies between request and offer. Some existing fuzzy matching approaches are able to detect such service variants, but they do not allow to explicitly specify which parts of a request are not mandatory. In this paper, we improve an existing service matching approach based on Visual Contracts leveraging our preliminary work of design pattern detection. Thereby, we support explicit specifications of service variants and realize gradual matching results that can be ranked in order to discover the service offer that matches a customer’s request best.},
  author       = {Platenius, Marie Christin and von Detten, Markus and Gerth, Christian and Schäfer, Wilhelm and Engels, Gregor},
  booktitle    = {{IEEE} 20th International Conference on Web Services ({ICWS} 2013)},
  pages        = {613--614},
  title        = {Service Matching under Consideration of Explicitly Specified Service Variants},
  doi          = {10.1109/ICWS.2013.98},
  year         = {2013},
}

@inproceedings{485,
  abstract     = {Software composition has been studied as a subject of state based planning for decades. Existing composition approaches that are efficient enough to be used in practice are limited to sequential arrangements of software components. This restriction dramatically reduces the number of composition problems that can be solved. However, there are many composition problems that could be solved by existing approaches if they had a possibility to combine components in very simple non-sequential ways. To this end, we present an approach that arranges not only basic components but also composite components. Composite components enhance the structure of the composition by conditional control flows. Through algorithms that are written by experts, composite components are automatically generated before the composition process starts. Therefore, our approach is not a substitute for existing composition algorithms but complements them with a preprocessing step. We verified the validity of our approach through implementation of the presented algorithms.},
  author       = {Mohr, Felix and Kleine Büning, Hans},
  booktitle    = {Proceedings of the 15th International Conference on Information Integration and Web-based Applications \& Services ({iiWAS})},
  pages        = {676--680},
  title        = {Semi-Automated Software Composition Through Generated Components},
  doi          = {10.1145/2539150.2539235},
  year         = {2013},
}

