@article{17666,
  abstract     = {{Software Defined Networks (SDN) and Network Function Virtualisation (NFV) provide the basis for autonomous response and mitigation against attacks on networked computer infrastructures. We propose a new framework that uses SDNs and NFV to achieve this goal: Secure Autonomous Response Network (SARNET). In a SARNET, an agent running a control loop constantly assesses the security state of the network by means of observables. The agent reacts to and resolves security problems, while learning from its previous decisions. Two main metrics govern the decision process in a SARNET: impact and efficiency; these metrics can be used to compare and evaluate countermeasures and are the building blocks for self-learning SARNETs that exhibit autonomous response. In this paper we present the software implementation of the SARNET framework, evaluate it in a real-life network and discuss the tradeoffs between parameters used by the SARNET agent and the efficiency of its actions.}},
  author       = {{Koning, R. and de Graaff, B. and Polevoy, Gleb and Meijer, R. and de Laat, C. and Grosso, P.}},
  issn         = {{0167-739X}},
  journal      = {{Future Generation Computer Systems}},
  keywords     = {{Software defined networks, Network function virtualization, Cyber attacks, Cyber security, Defense efficiency, Overlay networks}},
  title        = {{{Measuring the efficiency of SDN mitigations against attacks on computer infrastructures}}},
  doi          = {{10.1016/j.future.2018.08.011}},
  year         = {{2018}},
}

@inbook{2322,
  abstract     = {{The vision of On-The-Fly Computing is an automatic composition
of existing software services. Based on natural language software
descriptions, end users will receive compositions tailored to their needs.
For this reason, the quality of the initial software service description
strongly determines whether a software composition really meets the expectations
of end users. In this paper, we expose open NLP challenges
needed to be faced for service composition in On-The-Fly Computing.}},
  author       = {{Bäumer, Frederik Simon and Geierhos, Michaela}},
  booktitle    = {{Proceedings of the 23rd International Conference on Natural Language and Information Systems}},
  editor       = {{Silberztein, Max and Atigui, Faten and Kornyshova, Elena and Métais, Elisabeth and Meziane, Farid}},
  isbn         = {{978-3-319-91946-1}},
  keywords     = {{Requirements Extraction, Temporal Reordering of Software Functions, Inaccuracy Compensation}},
  location     = {{Paris, France}},
  pages        = {{509--513}},
  publisher    = {{Springer}},
  title        = {{{How to Deal with Inaccurate Service Descriptions in On-The-Fly Computing: Open Challenges}}},
  doi          = {{10.1007/978-3-319-91947-8_53}},
  volume       = {{10859}},
  year         = {{2018}},
}

@article{2331,
  abstract     = {{A user generally writes software requirements in ambiguous and incomplete form by using natural language; therefore, a software developer may have difficulty in clearly understanding what the meanings are. To solve this problem with automation, we propose a classifier for semantic annotation with manually pre-defined semantic categories. To improve our classifier, we carefully designed syntactic features extracted by constituency and dependency parsers. Even with a small dataset and a large number of classes, our proposed classifier records an accuracy of 0.75, which outperforms the previous model, REaCT.}},
  author       = {{Kim, Yeongsu and Lee, Seungwoo and Dollmann, Markus and Geierhos, Michaela}},
  issn         = {{2207-6360}},
  journal      = {{International Journal of Advanced Science and Technology}},
  keywords     = {{Software Engineering, Natural Language Processing, Semantic Annotation, Machine Learning, Feature Engineering, Syntactic Structure}},
  pages        = {{123--136}},
  publisher    = {{SERSC Australia}},
  title        = {{{Improving Classifiers for Semantic Annotation of Software Requirements with Elaborate Syntactic Structure}}},
  doi          = {{10.14257/ijast.2018.112.12}},
  volume       = {{112}},
  year         = {{2018}},
}

@inproceedings{4339,
  abstract     = {{On-The-Fly Computing is the vision of covering software needs of end users by fully-automatic compositions of existing software services. End users will receive so-called service compositions tailored to their very individual needs, based on natural language software descriptions. This everyday language may contain inaccuracies and incompleteness, which are well-known challenges in requirements engineering. In addition to existing approaches that try to automatically identify and correct these deficits, there are also new trends to involve users more in the elaboration and refinement process. In this paper, we present the relevant state of the art in the field of automated detection and compensation of multiple inaccuracies in natural language service descriptions and name open challenges needed to be tackled in NL-based software service composition. }},
  author       = {{Bäumer, Frederik Simon and Geierhos, Michaela}},
  booktitle    = {{Proceedings of the 24th International Conference on Information and Software Technologies (ICIST 2018)}},
  editor       = {{Damaševičius, Robertas and Vasiljevienė, Giedrė}},
  isbn         = {{9783319999715}},
  issn         = {{1865-0929}},
  keywords     = {{Inaccuracy detection, Natural language software requirements}},
  location     = {{Vilnius, Lithuania}},
  pages        = {{559--570}},
  publisher    = {{Springer}},
  title        = {{{NLP in OTF Computing: Current Approaches and Open Challenges}}},
  doi          = {{10.1007/978-3-319-99972-2_46}},
  volume       = {{920}},
  year         = {{2018}},
}

@inproceedings{44,
  abstract     = {{Natural language software requirements descriptions enable end users to formulate their wishes and expectations for a future software product without much prior knowledge in requirements engineering. However, these descriptions are susceptible to linguistic inaccuracies such as ambiguities and incompleteness that can harm the development process. There is a number of software solutions that can detect deficits in requirements descriptions and partially solve them, but they are often hard to use and not suitable for end users. For this reason, we develop a software system that helps end-users to create unambiguous and complete requirements descriptions by combining existing expert tools and controlling them using automatic compensation strategies. In order to recognize the necessity of individual compensation methods in the descriptions, we have developed linguistic indicators, which we present in this paper. Based on these indicators, the whole text analysis pipeline is ad-hoc configured and thus adapted to the individual circumstances of a requirements description.}},
  author       = {{Bäumer, Frederik Simon and Geierhos, Michaela}},
  booktitle    = {{Proceedings of the 51st Hawaii International Conference on System Sciences}},
  isbn         = {{978-0-9981331-1-9}},
  keywords     = {{Software Product Lines: Engineering, Services, and Management, Ambiguities, Incompleteness, Natural Language Processing, Software Requirements}},
  location     = {{Big Island, Waikoloa Village}},
  pages        = {{5746--5755}},
  title        = {{{Flexible Ambiguity Resolution and Incompleteness Detection in Requirements Descriptions via an Indicator-based Configuration of Text Analysis Pipelines}}},
  doi          = {{10125/50609}},
  year         = {{2018}},
}

@inproceedings{48867,
  abstract     = {{Assessing the performance of stochastic optimization algorithms in the field of multi-objective optimization is of utmost importance. Besides the visual comparison of the obtained approximation sets, more sophisticated methods have been proposed in the last decade, e. g., a variety of quantitative performance indicators or statistical tests. In this paper, we present tools implemented in the R package ecr, which assist in performing comprehensive and sound comparison and evaluation of multi-objective evolutionary algorithms following recommendations from the literature.}},
  author       = {{Bossek, Jakob}},
  booktitle    = {{Proceedings of the Genetic and Evolutionary Computation Conference Companion}},
  isbn         = {{978-1-4503-5764-7}},
  keywords     = {{evolutionary optimization, performance assessment, software-tools}},
  pages        = {{1350--1356}},
  publisher    = {{Association for Computing Machinery}},
  title        = {{{Performance Assessment of Multi-Objective Evolutionary Algorithms with the R Package ecr}}},
  doi          = {{10.1145/3205651.3208312}},
  year         = {{2018}},
}

@article{20557,
  author       = {{Lillack, Max and Kästner, Christian and Bodden, Eric}},
  issn         = {{0098-5589}},
  journal      = {{IEEE Transactions on Software Engineering}},
  keywords     = {{Androids, Bluetooth, Humanoid robots, Java, Software, Tools, Configuration options, Static analysis, Variability mining}},
  number       = {{99}},
  pages        = {{1--1}},
  title        = {{{Tracking Load-time Configuration Options}}},
  doi          = {{10.1109/TSE.2017.2756048}},
  volume       = {{PP}},
  year         = {{2017}},
}

@inproceedings{97,
  abstract     = {{Bridging the gap between informal, imprecise, and vague user requirements descriptions and precise formalized specifications is the main task of requirements engineering. Techniques such as interviews or story telling are used when requirements engineers try to identify a user's needs. The requirements specification process is typically done in a dialogue between users, domain experts, and requirements engineers. In our research, we aim at automating the specification of requirements. The idea is to distinguish between untrained users and trained users, and to exploit domain knowledge learned from previous runs of our system. We let untrained users provide unstructured natural language descriptions, while we allow trained users to provide examples of behavioral descriptions. In both cases, our goal is to synthesize formal requirements models similar to statecharts. From requirements specification processes with trained users, behavioral ontologies are learned which are later used to support the requirements specification process for untrained users. Our research method is original in combining natural language processing and search-based techniques for the synthesis of requirements specifications. Our work is embedded in a larger project that aims at automating the whole software development and deployment process in envisioned future software service markets.}},
  author       = {{van Rooijen, Lorijn and Bäumer, Frederik Simon and Platenius, Marie Christin and Geierhos, Michaela and Hamann, Heiko and Engels, Gregor}},
  booktitle    = {{2017 IEEE 25th International Requirements Engineering Conference Workshops (REW)}},
  isbn         = {{978-1-5386-3489-9}},
  keywords     = {{Software, Unified modeling language, Requirements engineering, Ontologies, Search problems, Natural languages}},
  location     = {{Lisbon, Portugal}},
  pages        = {{379--385}},
  publisher    = {{IEEE}},
  title        = {{{From User Demand to Software Service: Using Machine Learning to Automate the Requirements Specification Process}}},
  doi          = {{10.1109/REW.2017.26}},
  year         = {{2017}},
}

@inproceedings{48863,
  abstract     = {{The novel R package ecr (version 2), short for Evolutionary Computation in R, provides a comprehensive collection of building blocks for constructing powerful evolutionary algorithms for single- and multi-objective continuous and combinatorial optimization problems. It allows to solve standard optimization tasks with few lines of code using a black-box approach. Moreover, rapid prototyping of non-standard ideas is possible via an explicit, white-box approach. This paper describes the design principles of the package and gives some introductory examples on how to use the package in practise.}},
  author       = {{Bossek, Jakob}},
  booktitle    = {{Proceedings of the Genetic and Evolutionary Computation Conference Companion}},
  isbn         = {{978-1-4503-4939-0}},
  keywords     = {{evolutionary optimization, software-tools}},
  pages        = {{1187--1193}},
  publisher    = {{Association for Computing Machinery}},
  title        = {{{Ecr 2.0: A Modular Framework for Evolutionary Computation in R}}},
  doi          = {{10.1145/3067695.3082470}},
  year         = {{2017}},
}

@inbook{51827,
  author       = {{Schmitt, Martin}},
  booktitle    = {{International Communities of Invention and Innovation}},
  editor       = {{Tatnall, Arthur and Leslie, Christopher}},
  keywords     = {{Kreditscoring, Informationssystem, Software}},
  pages        = {{141--164}},
  publisher    = {{Springer}},
  title        = {{{The Code of Banking. Software as the Digitalization of German Savings Banks}}},
  year         = {{2016}},
}

@inproceedings{10673,
  author       = {{Ho, Nam and Ahmed, Abdullah Fathi and Kaufmann, Paul and Platzner, Marco}},
  booktitle    = {{Proc. NASA/ESA Conf. Adaptive Hardware and Systems (AHS)}},
  keywords     = {{cache storage, field programmable gate arrays, multiprocessing systems, parallel architectures, reconfigurable architectures, FPGA, dynamic reconfiguration, evolvable cache mapping, many-core architecture, memory-to-cache address mapping function, microarchitectural optimization, multicore architecture, nature-inspired optimization, parallelization degrees, processor, reconfigurable cache mapping, reconfigurable computing, Field programmable gate arrays, Software, Tuning}},
  pages        = {{1--7}},
  title        = {{{Microarchitectural optimization by means of reconfigurable and evolvable cache mappings}}},
  doi          = {{10.1109/AHS.2015.7231178}},
  year         = {{2015}},
}

@inproceedings{10779,
  author       = {{Guettatfi, Zakarya and Kermia, Omar and Khouas, Abdelhakim}},
  booktitle    = {{25th International Conference on Field Programmable Logic and Applications (FPL)}},
  issn         = {{1946-147X}},
  keywords     = {{embedded systems, field programmable gate arrays, operating systems (computers), scheduling, μC/OS-II, FPGAs, OS foundation, SafeRTOS, Xenomai, chip utilization ration, complex time constraints, embedded systems, hard real-time hardware task allocation, hard real-time hardware task scheduling, hardware-software real-time operating systems, partially reconfigurable field-programmable gate arrays, resource constraints, safety-critical RTOS, Field programmable gate arrays, Hardware, Job shop scheduling, Real-time systems, Shape, Software}},
  publisher    = {{Imperial College}},
  title        = {{{Over effective hard real-time hardware tasks scheduling and allocation}}},
  doi          = {{10.1109/FPL.2015.7293994}},
  year         = {{2015}},
}

@misc{33312,
  abstract     = {{Mechatronic systems are used more than ever in human life. They can be found in a very wide range of domain contexts, from household appliances, and cars, to medical equipment. Mechatronic systems, as a kind of embedded systems, are the tight integration of mechanical and electrical engineering, which embed software systems. Information security of mechatronic systems has not received much attention yet. However, wherever data exists, cyber attacks threaten mechatronic systems.

The thesis focuses on the early design stages of the development of mechatronic systems. Model sequence diagrams (MSDs) are used to model requirements with real-time and safety properties. In this thesis, MSDs are extended such that security properties for example authenticity and privacy can be modeled and analyzed automatically.}},
  author       = {{Schwichtenberg, Bahar}},
  keywords     = {{Software Architecture, Requirements Engineering, Embedded Systems}},
  title        = {{{Early Prediction of Security Properties for Mechatronic Systems}}},
  year         = {{2015}},
}

@article{41866,
  author       = {{Russer, Johannes A. and Uddin, Nasir and Awny, Ahmed Sanaa and Thiede, Andreas and Russer, Peter}},
  issn         = {{2162-2264}},
  journal      = {{IEEE Electromagnetic Compatibility Magazine}},
  keywords     = {{Electrical and Electronic Engineering, Computer Networks and Communications, Instrumentation, Signal Processing, Software}},
  number       = {{3}},
  pages        = {{79--85}},
  publisher    = {{Institute of Electrical and Electronics Engineers (IEEE)}},
  title        = {{{Near-field measurement of stochastic electromagnetic fields}}},
  doi          = {{10.1109/memc.2015.7336761}},
  volume       = {{4}},
  year         = {{2015}},
}

@inproceedings{10674,
  author       = {{Ho, Nam and Kaufmann, Paul and Platzner, Marco}},
  booktitle    = {{24th Intl. Conf. on Field Programmable Logic and Applications (FPL)}},
  keywords     = {{Linux, hardware-software codesign, multiprocessing systems, parallel processing, LEON3 multicore platform, Linux kernel, PMU, hardware counters, hardware-software infrastructure, high performance embedded computing, perf_event, performance monitoring unit, Computer architecture, Hardware, Monitoring, Phasor measurement units, Radiation detectors, Registers, Software}},
  pages        = {{1--4}},
  title        = {{{A hardware/software infrastructure for performance monitoring on LEON3 multicore platforms}}},
  doi          = {{10.1109/FPL.2014.6927437}},
  year         = {{2014}},
}

@inproceedings{36918,
  abstract     = {{This paper presents an advanced eight levels spanning SystemC based virtual platform methodology and framework - referred to as HeroeS 3 - providing smooth application to platform mapping and continuous co-refinement of a virtual prototype with its physical environment model. For heterogeneity support, various SystemC extensions are combined covering continuous/discrete models of computation and different communication abstractions, such as analog mixed-signal models, abstract RTOS/HAL/middleware models, TLM bus models, and QEMU wrappers. We enable dependability assessment by Fault Effect Modeling (FEM) at the virtual prototype in order to avoid risking physical injury or damage. Also, simulation results are deterministic and can be evaluated interactively or offline. We apply FEM to both the physical environment model and the different abstractions of the virtual prototype. Currently, we focus on sensor failures and application control flow errors.}},
  author       = {{Becker, Markus and Kuznik, Christoph and Müller, Wolfgang}},
  keywords     = {{Computational modeling, Finite element analysis, Prototypes, Abstracts, Software, Fault tolerance, Fault tolerant systems}},
  location     = {{Berlin}},
  publisher    = {{IEEE}},
  title        = {{{Fault Effect Modeling in a Heterogeneous SystemC Based Virtual Platform Framework for Cyber Physical Systems}}},
  doi          = {{10.1109/ICCPS.2014.6843726}},
  year         = {{2014}},
}

@article{46266,
  author       = {{Alizadeh, Bijan and Behnam, Payman and Sadeghi-Kohan, Somayeh}},
  issn         = {{0018-9340}},
  journal      = {{IEEE Transactions on Computers}},
  keywords     = {{Computational Theory and Mathematics, Hardware and Architecture, Theoretical Computer Science, Software}},
  pages        = {{1--1}},
  publisher    = {{Institute of Electrical and Electronics Engineers (IEEE)}},
  title        = {{{A Scalable Formal Debugging Approach with Auto-Correction Capability based on Static Slicing and Dynamic Ranking for RTL Datapath Designs}}},
  doi          = {{10.1109/tc.2014.2329687}},
  year         = {{2014}},
}

@inproceedings{22737,
  author       = {{Becker, Matthias and Luckey, Markus and Becker, Steffen}},
  booktitle    = {{Proceedings of the 8th International ACM SIGSOFT Conference on Quality of Software Architectures (QoSA)}},
  isbn         = {{978-1-4503-1346-9}},
  keywords     = {{model-driven performance engineering, self-*, Self-adaptation, software performance}},
  pages        = {{117--122}},
  publisher    = {{ACM}},
  title        = {{{Model-driven Performance Engineering of Self-adaptive Systems: A Survey}}},
  doi          = {{10.1145/2304696.2304716}},
  year         = {{2012}},
}

@article{4706,
  abstract     = {{Purpose – The purpose of this paper is to show how to employ complex event processing (CEP) for the observation and management of business processes. It proposes a conceptual architecture of BPM event producer, processor, and consumer and describes technical implications for the application with standard software in a perfect order scenario. Design/methodology/approach – The authors discuss business process analytics as the technological background. The capabilities of CEP in a BPM context are outlined an architecture design is proposed. A sophisticated proof-of-concept demonstrates its applicability. Findings – The results overcome the separation and data latency issues of process controlling, monitoring, and simulation. Distinct analyses of past, present, and future blur into a holistic real-time approach. The authors highlight the necessity for configurable event producer in BPM engines, process event support in CEP engines, a common process event format, connectors to visualizers, notifiers and return channels to the BPM engine. Research limitations/implications – Further research will thoroughly evaluate the approach in a variety of business settings. New concepts and standards for the architecture's building blocks will be needed to improve maintainability and operability. Practical implications – Managers learn how CEP can yield insights into business processes' operations. The paper illustrates a path to overcome inflexibility, latency, and missing feedback mechanisms of current process modeling and control solutions. Software vendors might be interested in the conceptualization and the described needs for further development. Originality/value – So far, there is no commercial CEP-based BPM solution which facilitates a round trip from insight to action as outlines. 
As major software vendors have begun developing solutions (BPM/BPA solutions), this paper will stimulate a debate between research and practice on suitable design and technology.}},
  author       = {{Janiesch, Christian and Matzner, Martin and Müller, Oliver}},
  isbn         = {{1020120096}},
  issn         = {{1463-7154}},
  journal      = {{Business Process Management Journal}},
  keywords     = {{Architecture, Business activity monitoring, Business process management, Business process re-engineering, Complex event processing, Computer software, Standard software}},
  number       = {{4}},
  pages        = {{625--643}},
  title        = {{{Beyond process monitoring: A proof-of-concept of event-driven business activity management}}},
  doi          = {{10.1108/14637151211253765}},
  year         = {{2012}},
}

@inproceedings{1120,
  abstract     = {{SCM is a simple, modular and flexible system for web monitoring and customer interaction management. In our view, its main advantages are the following: It is completely web based. It combines all technologies, data, software agents and human agents involved in the monitoring and customer interaction process. It can be used for messages written in any natural language. Although the prototype of SCM is designed for classifying and processing messages about mobile-phone related problems in social networks, SCM can easily be adapted to other text types such as discussion board posts, blogs or emails. Unlike comparable systems, SCM uses linguistic technologies to classify messages and recognize paraphrases of product names. For two reasons, product name paraphrasing plays a major role in SCM: First, product names typically have many, sometimes hundreds or thousands of intralingual paraphrases. Secondly, product names have interlingual paraphrases: The same products are often called or spelt differently in different countries and/or languages. By mapping product name variants to an international canonical form, SCM allows for answering questions like Which statements are made about this mobile phone in which languages/in which social networks/in which countries/...? The SCM product name paraphrasing engine is designed in such a way that standard variants are assigned automatically, regular variants are assigned semiautomatically and idiosyncratic variants can be added manually. With this and similar features we try to realize our philosophy of simplicity, modularity and flexibility: Whatever can be done automatically is done automatically. But manual intervention is always possible and easy and it does not conflict in any way with the automatic functions of SCM.}},
  author       = {{Schuster, Jörg and Lee, Yeong Su and Kobothanassi, Despina and Bargel, Matthias and Geierhos, Michaela}},
  booktitle    = {{International Conference on Information Society (i-Society 2011)}},
  isbn         = {{978-1-61284-148-9}},
  keywords     = {{Social Media Business Integration, Contact Center Application Support, Monitoring Social Conversations, Social Customer Interaction Management, Monitoring, Software Agents}},
  location     = {{London, UK}},
  pages        = {{153--158}},
  publisher    = {{IEEE}},
  title        = {{{SCM - A Simple, Modular and Flexible Customer Interaction Management System}}},
  year         = {{2011}},
}

