@inproceedings{26539,
  abstract     = {{In control design, most control strategies are model-based and require accurate models to be applied successfully. Due to simplifications and the model-reality gap, physics-derived models frequently exhibit deviations from real-world systems. Likewise, purely data-driven methods often do not generalise well enough and may violate physical laws. Recently, Physics-Guided Neural Networks (PGNNs) and physics-inspired loss functions have separately shown promising results in overcoming these drawbacks. In this contribution, we extend existing methods towards the identification of non-autonomous systems and propose a combined approach, PGNN-L, which uses a PGNN and a physics-inspired loss term (-L) to identify the system's dynamics while maintaining consistency with physical laws. The proposed method is demonstrated on two real-world nonlinear systems and outperforms existing techniques regarding complexity and reliability.}},
  author       = {{Götte, Ricarda-Samantha and Timmermann, Julia}},
  booktitle    = {{2022 3rd International Conference on Artificial Intelligence, Robotics and Control (AIRC)}},
  keywords     = {{data-driven, physics-based, physics-informed, neural networks, system identification, hybrid modelling}},
  location     = {{Cairo, Egypt}},
  pages        = {{67--76}},
  title        = {{{Composed Physics- and Data-driven System Identification for Non-autonomous Systems in Control Engineering}}},
  doi          = {{10.1109/AIRC56195.2022.9836982}},
  year         = {{2022}},
}

@inproceedings{29803,
  abstract     = {{Ultrasonic wire bonding is a solid-state joining process used to form electrical interconnections in micro and power electronics and batteries. A high-frequency oscillation causes the formation of a metallurgical bond in the contact area. Due to the numerous physical influencing factors, it is very difficult to capture this process accurately in a model. Therefore, our goal is to determine a suitable feed-forward control strategy for the bonding process even without detailed model knowledge. We propose the use of batch constrained Bayesian optimization for the control design. Bayesian optimization is thereby precisely adapted to the bonding application: the constraint is used to check one quality feature of the process, and the use of batches leads to more efficient experiments. Our approach is suitable for determining a feed-forward control for the bonding process that provides very high-quality bonds without using a physical model. We also show that the Bayesian-optimization-based control outperforms random search as well as manual search by a user in terms of bond quality. Using a simple prior-knowledge model derived from data further improves the quality of the connection. The Bayesian optimization approach also offers the possibility of performing a sensitivity analysis of the control parameters, which allows the influence of each control parameter on the bond quality to be evaluated. In summary, Bayesian optimization applied to the bonding process provides an excellent opportunity to develop a feed-forward control without full modeling of the underlying physical processes.}},
  author       = {{Hesse, Michael and Hunstig, Matthias and Timmermann, Julia and Trächtler, Ansgar}},
  booktitle    = {{Proceedings of the 11th International Conference on Pattern Recognition Applications and Methods (ICPRAM)}},
  isbn         = {{978-989-758-549-4}},
  keywords     = {{Bayesian optimization, Wire bonding, Feed-forward control, model-free design}},
  location     = {{Online}},
  pages        = {{383--394}},
  title        = {{{Batch Constrained Bayesian Optimization for Ultrasonic Wire Bonding Feed-forward Control Design}}},
  year         = {{2022}},
}

@inproceedings{56993,
  author       = {{Schaffer, Michael and Budde, Lea and Schulte, Carsten and Buhl, Heike M.}},
  booktitle    = {{52nd DGPs Congress - Abstracts}},
  editor       = {{Bermeitinger, Christina and Greve, Werner}},
  keywords     = {{Cognition, Motivation, Technical Model, Mental Model, Explainer, Explainee, Qualitative Content Analysis}},
  location     = {{Hildesheim}},
  title        = {{{Die Anpassungen von Erklärungen an das Verständnis des Erklärgegenstandes der Gesprächspartner}}},
  year         = {{2022}},
}

@misc{25126,
  abstract     = {{Motivated by the prospect of computing agents that explore unknown environments and construct convex hulls on the nanoscale, we investigate the capabilities and limitations of a single deterministic finite-automaton robot in the three-dimensional hybrid model for programmable matter. In this model, active robots move on a set of passive tiles, called a configuration, with the geometric shape of rhombic dodecahedra on the adjacency graph of the face-centered cubic sphere packing. We show that the exploration problem is equally hard in the hybrid model and in three-dimensional mazes, in which tiles have the shape of cubes and are positioned at the vertices of $\mathbb{Z}^3$. As a consequence, a single robot with a constant number of pebbles cannot solve this problem in the hybrid model on arbitrary configurations. We provide algorithms for a robot with two pebbles that solve the exploration problem on the subclass of compact configurations of size $n$ in $\mathcal{O}(n^3)$ rounds. Further, we investigate the robot's capabilities for detection and hull construction in terms of restricted-orientation convexity. We show that a robot without any pebble can detect strong $\mathcal{O}$-convexity in $\mathcal{O}(n)$ rounds, but cannot detect weak $\mathcal{O}$-convexity, not even if provided with a single pebble. Assuming that the robot can construct tiles from scratch and deconstruct previously constructed tiles, we show that the strong $\mathcal{O}$-hull of any given configuration of size $n$ can be constructed in $\mathcal{O}(n^4)$ rounds, even if the robot cannot distinguish constructed from native tiles.}},
  author       = {{Liedtke, David Jan}},
  keywords     = {{Robot Exploration, Finite Automaton, Hybrid Model for Programmable Matter, Convex Hull}},
  title        = {{{Exploration and Convex Hull Construction in the Three-Dimensional Hybrid Model}}},
  year         = {{2021}},
}

@inproceedings{21639,
  abstract     = {{The development of effective business models is an essential task in highly competitive markets like mobile ecosystems. Existing development methods for these business models do not specifically take into account that the development process depends profoundly on the situation (e.g., market size, regulations) of the mobile app developer. Here, a mismatch between method and situation can lead to poor resource management and longer development cycles. In software engineering, situational method engineering is used to configure a development method for a software project out of a method repository based on the project situation. Analogously, we support the creation of situation-specific business model development methods with a method base and new user roles. Here, the method engineer obtains the knowledge of the domain expert and stores it in the method base as elements, building blocks, and patterns. The expert knowledge is derived from a grey literature review on mobile development processes. After this, the method engineer constructs the development method based on the situation described by the business developer. We provide an open-source tool and evaluate it by constructing the business model development method of a local event platform.}},
  author       = {{Gottschalk, Sebastian and Yigitbas, Enes and Nowosad, Alexander and Engels, Gregor}},
  booktitle    = {{Enterprise, Business-Process and Information Systems Modeling}},
  keywords     = {{Business Model Development, Situational Method Engineering, Mobile App, Business Model Development Tools}},
  publisher    = {{Springer}},
  title        = {{{Situation-specific Business Model Development Methods for Mobile App Developers}}},
  doi          = {{10.1007/978-3-030-79186-5_17}},
  year         = {{2021}},
}

@inbook{25528,
  abstract     = {{Developing effective business models is a complex process in which a company must accomplish several tasks (e.g., conducting customer interviews) and make decisions (e.g., choosing advertisement as a revenue stream). Here, domain experts can guide the choice of tasks and decisions with their knowledge. Nevertheless, this knowledge needs to match the situation of the company (e.g., financial resources) and the application domain of the product/service (e.g., mobile app) to reduce the risk of developing ineffective business models with low market penetration. This is not covered by one-size-fits-all development methods that are not tailored before enactment.
Therefore, we conduct a design science study to create a situation-specific development approach for business models. Based on situational method engineering and our previous work on storing knowledge of methods and models in distinct repositories, this paper shows the situation-specific composition and enactment of business model development methods. First, the method engineer composes the development method out of both repositories based on the situational context. Second, the business developer enacts the method and develops the business model. We implement the approach in a tool and evaluate it with an industrial case study on mobile apps.}},
  author       = {{Gottschalk, Sebastian and Yigitbas, Enes and Nowosad, Alexander and Engels, Gregor}},
  booktitle    = {{Product-focused Software Process Improvement}},
  keywords     = {{Business Model Development, Situational Method Engineering, Lean Development, Kanban Boards, Canvas Models}},
  location     = {{Turin}},
  publisher    = {{Springer}},
  title        = {{{Situation- and Domain-specific Composition and Enactment of Business Model Development Methods}}},
  year         = {{2021}},
}

@article{32560,
  abstract     = {{Several methods are available to answer questions regarding similarity and accuracy, each of which has specific properties and limitations. This study focuses on the Latent Congruence Model (LCM; Cheung, 2009) because of its capacity to deal with cross-informant measurement invariance issues. Until now, no cross-national applications of the LCM have appeared in the literature, perhaps because of the difficulty of dealing with both the cross-national and the cross-informant measurement issues implied by those models. This study presents a step-by-step procedure for applying the LCM to dyadic cross-national research designs while controlling for both cross-national and cross-informant measurement invariance. An illustrative example on parent–child support exchanges in Italy and Germany is provided. The findings illustrate the different possible scenarios of partial invariance, and a discussion of how to deal with those scenarios is provided. Future perspectives in the study of parent–child similarity and accuracy in cross-national research are discussed.}},
  author       = {{Tagliabue, Semira and Zambelli, Michela and Sorgente, Angela and Sommer, Sabrina and Hoellger, Christian and Buhl, Heike M. and Lanz, Margherita}},
  issn         = {{1664-1078}},
  journal      = {{Frontiers in Psychology}},
  keywords     = {{latent congruence model, measurement invariance, similarity, accuracy, cross-national, cross-informant, parent-child relationship, support exchanges}},
  publisher    = {{Frontiers Media SA}},
  title        = {{{Latent Congruence Model to Investigate Similarity and Accuracy in Family Members' Perception: The Challenge of Cross-National and Cross-Informant Measurement (Non)Invariance}}},
  doi          = {{10.3389/fpsyg.2021.672383}},
  volume       = {{12}},
  year         = {{2021}},
}

@article{25046,
  abstract     = {{While increasing digitalization enables multiple advantages for the reliable operation of technical systems, a remaining challenge in the context of condition monitoring is the suitable consideration of uncertainties affecting the monitored system. A suitable prognostic approach to predict the remaining useful lifetime of complex technical systems is therefore required. To handle different kinds of uncertainties, a novel Multi-Model Particle Filtering-based prognostic approach is developed and evaluated on the use case of rubber-metal elements. These elements are maintained preventively due to the strong influence of uncertainties on their behavior. In this paper, two measurement quantities are compared concerning their ability to support a prediction of the remaining useful lifetime of the monitored elements and the influence of present uncertainties. The results are evaluated based on three performance indices. A comparison with the predictions of a classical particle filter underlines the superiority of the developed Multi-Model Particle Filter. Finally, the value of the developed method for enabling condition monitoring of technical systems subject to uncertainties is demonstrated by a comparison between the preventive and the predictive maintenance strategy for the use case.}},
  author       = {{Bender, Amelie}},
  issn         = {{2075-1702}},
  journal      = {{Machines}},
  keywords     = {{prognostics, RUL predictions, particle filter, uncertainty consideration, Multi-Model-Particle Filter, model-based approach, rubber-metal-elements, predictive maintenance}},
  number       = {{10}},
  title        = {{{A Multi-Model-Particle Filtering-Based Prognostic Approach to Consider Uncertainties in RUL Predictions}}},
  doi          = {{10.3390/machines9100210}},
  volume       = {{9}},
  year         = {{2021}},
}

@article{35202,
  abstract     = {{Purpose: This study investigates how digitalisation (in the sense of Industry 4.0) has changed the work of farmers and how they experience the changes from more traditional work to digitalised agriculture. It also investigates what knowledge farmers require on digitalised farms and how they acquire it. Dairy farming was used as the domain of investigation since, unlike many other industries, it has been strongly affected by digitalisation in recent years.

Method: Exploratory interviews with 10 livestock farmers working on digitalised dairy farms were analysed using qualitative content analysis. A combined deductive and inductive coding strategy was used.

Findings: Farming work has changed from more manual tasks towards symbol manipulation and data processing. Farmers must be able to use computers and other digital devices to retrieve and analyse sensor data that allow them to monitor and control the processes on their farm. For this new kind of work, farmers require elaborated mental models that link traditional farming knowledge with knowledge about digital systems, including a strong understanding of the production processes underlying their farm. Learning is mostly based on instructions offered by the manufacturers of the new technology as well as on informal and non-formal learning modes. Even younger farmers report that digital technology was not sufficiently covered in their (vocational) degrees. In general, farmers emphasise the positive effects of digitalisation on both their working and private lives.

Conclusions: Farmers should be aware of the opportunities as well as the potential drawbacks of the digitalisation of work processes in agriculture. Providers of agricultural education (like vocational schools or training institutes) need to incorporate the knowledge and skills required to work in digitalised environments (e.g., data literacy) into their syllabi. Further studies are required to assess how digitalisation changes farming practices and what knowledge and skills linked to these developments will be required in the future.}},
  author       = {{Goller, Michael and Caruso, Carina and Harteis, Christian}},
  issn         = {{2197-8646}},
  journal      = {{International Journal for Research in Vocational Education and Training}},
  keywords     = {{Work-Based Learning, Organisational Change, Digital Competences, Qualitative Research, Digitalisation, Farming, Dairy, VET, Vocational Education and Training}},
  number       = {{2}},
  pages        = {{208--223}},
  title        = {{{Digitalisation in Agriculture: Knowledge and Learning Requirements of German Dairy Farmers}}},
  doi          = {{10.13152/IJRVET.8.2.4}},
  volume       = {{8}},
  year         = {{2021}},
}

@inproceedings{24280,
  abstract     = {{Challenges in decisions on technical changes are the lack of knowledge about the expected impact and about change propagation. Currently, no literature study contains a systematic differentiation and evaluation of existing approaches, which is a prerequisite for practitioners to select a suitable approach. This research aims at defining differentiation criteria as well as generally applicable requirements for evaluation. A four-step approach is used: a systematic literature review on approaches for impact analysis of engineering changes (1), categorization and prioritization of the approaches based on recurring elements (2), derivation of context-specific requirements for evaluation (3), and evaluation of the approaches (4). The results indicate the potential of object-oriented modeling approaches.}},
  author       = {{Gräßler, Iris and Wiechel, Dominik}},
  booktitle    = {{DS 111: Proceedings of the 32nd Symposium Design for X}},
  editor       = {{Krause, Dieter and Paetzold, Kristin and Wartzack, Sandro}},
  keywords     = {{Engineering Change Management, Impact Analysis, Engineering Changes, Model-based Systems Engineering, Product Development}},
  location     = {{Tutzing}},
  title        = {{{Systematische Bewertung von Auswirkungsanalysen des Engineering Change Managements}}},
  doi          = {{10.35199/dfx2021.12}},
  year         = {{2021}},
}

@inproceedings{24080,
  abstract     = {{The challenges of developing mechatronic systems and corresponding production systems have increased steadily. These changes are primarily due to increased product complexity and the connection to the Internet of Things and Services, enabling Cyber-Physical Systems (CPS) and Cyber-Physical Production Systems (CPPS). Major innovations of the revised VDI guideline 2206 for developing mechatronic systems are systems thinking as a core element and six checkpoints for structuring deliverables along the V-Model. These checkpoints provide orientation on the progress of results and thus enable a structured and complete development process. However, the tasks and checkpoints of the new guideline focus on product development itself without integrating the development of the related CPPS, which would enable optimization simultaneously with system development. Implications are derived by a three-step analysis. The paper at hand contributes fundamental extensions of the checkpoint questions regarding integrated CPPS development. These questions provide methodical support for system developers of CPPS for CPS by enabling the project manager to check the status, schedule further development steps and evaluate the maturity of the whole, integrated development.}},
  author       = {{Gräßler, Iris and Wiechel, Dominik and Roesmann, Daniel and Thiele, Henrik}},
  booktitle    = {{Procedia CIRP}},
  issn         = {{2212-8271}},
  keywords     = {{Cyber-Physical Production System (CPPS), V-Model, Product System Development, Integrated Development, VDI 2206}},
  pages        = {{253--258}},
  title        = {{{V-model based development of cyber-physical systems and cyber-physical production systems}}},
  doi          = {{10.1016/j.procir.2021.05.119}},
  year         = {{2021}},
}

@inproceedings{22724,
  abstract     = {{Predictive maintenance, as a desirable maintenance strategy in industrial applications, relies on suitable condition monitoring solutions to reduce the costs and risks of the monitored technical systems. In general, those solutions utilize model-based or data-driven methods to diagnose the current state or predict future states of monitored technical systems. However, both methods have their advantages and drawbacks, and combining them can improve uncertainty consideration and accuracy. Different combination approaches for such hybrid methods exist to exploit synergy effects. The choice of an appropriate approach depends on different requirements and on the goal behind the selection of a hybrid approach.

In this work, the hybrid approach for estimating remaining useful lifetime takes potential uncertainties into account. To this end, a data-driven estimation of new measurements is integrated within a model-based method. To consider uncertainties within the system, a differentiation between different system behaviors is realized across diverse states of degradation. The developed hybrid prediction approach is based on a particle filtering method combined with a machine learning method to estimate the remaining useful lifetime of technical systems. Particle filtering, as a Monte Carlo simulation technique, is suitable for mapping and propagating uncertainties; moreover, it is a state-of-the-art model-based method for predicting the remaining useful lifetime of technical systems. To integrate uncertainties, a multi-model particle filtering approach is employed. In general, resampling as a part of the particle filtering approach has the potential to lead to an accurate prediction. However, in the case where no future measurements are available, it may increase the uncertainty of the prediction. By estimating new measurements, those uncertainties are reduced within the data-driven part of the approach. Hence, both parts of the hybrid approach strive to account for and reduce uncertainties.

Rubber-metal elements are employed as a use case to evaluate the developed approach. These elements, which are used to isolate vibrations in various systems such as railways, trucks and wind turbines, show various uncertainties in their behavior and their degradation. Those uncertainties are caused by diverse inner and outer factors, such as manufacturing influences and operating conditions. Using expert knowledge, the influences are described, analyzed and, where possible, reduced. The remaining uncertainties are considered within the hybrid prediction method. Relative temperature is the selected measurand to describe the element's degradation. In lifetime tests, it is measured as the difference between the element's temperature and the ambient temperature, thereby taking the influence of the ambient temperature on the element's temperature into account. The elements show three typical states of degradation that are identified within the temperature measurements. Depending on the particular state of degradation, a new measurement is estimated within the hybrid approach to reduce potential uncertainties.

Finally, the performance of the developed hybrid method is compared to that of a model-based method for estimating the remaining useful lifetime of the same elements. Suitable performance indices are implemented to underline the differences between the results.}},
  author       = {{Bender, Amelie and Sextro, Walter}},
  booktitle    = {{Proceedings of the European Conference of the PHM Society 2021}},
  editor       = {{Do, Phuc and King, Steve and Fink, Olga}},
  keywords     = {{Hybrid prediction method, Multi-model particle filtering, Uncertainty quantification, RUL estimation}},
  number       = {{1}},
  title        = {{{Hybrid Prediction Method for Remaining Useful Lifetime Estimation Considering Uncertainties}}},
  doi          = {{10.36001/phme.2021.v6i1.2843}},
  volume       = {{6}},
  year         = {{2021}},
}

@inproceedings{27111,
  abstract     = {{In the Industry 4.0 era, there is a growing need to transform unstructured data acquired by a multitude of sources into information and subsequently into knowledge, e.g., to improve the quality of manufactured products, to boost production and for predictive maintenance. Data-driven approaches, such as machine learning techniques, are typically employed to model the underlying relationship from data. However, an increase in model accuracy with state-of-the-art methods, such as deep convolutional neural networks, comes at the cost of interpretability and transparency. Due to its ease of implementation, interpretation and transparency to both domain experts and non-experts, a rule-based method is proposed in this paper for prognostics and health management (PHM) and specifically for diagnostics. The proposed method utilizes the most relevant sensor signals, identified via feature extraction and selection techniques and expert knowledge. As a case study, the presented method is evaluated on data from a real-world quality-control set-up provided by the European Prognostics and Health Management Society (PHME) at its 2021 conference data challenge. With the proposed method, our team took third place, successfully diagnosing different fault modes irrespective of varying conditions.}},
  author       = {{Aimiyekagbon, Osarenren Kennedy and Muth, Lars and Wohlleben, Meike Claudia and Bender, Amelie and Sextro, Walter}},
  booktitle    = {{Proceedings of the European Conference of the PHM Society 2021}},
  editor       = {{Do, Phuc and King, Steve and Fink, Olga}},
  keywords     = {{PHME 2021, Feature Selection Classification, Feature Selection Clustering, Interpretable Model, Transparent Model, Industry 4.0, Real-World Diagnostics, Quality Control, Predictive Maintenance}},
  number       = {{1}},
  pages        = {{527--536}},
  title        = {{{Rule-based Diagnostics of a Production Line}}},
  doi          = {{10.36001/phme.2021.v6i1.3042}},
  volume       = {{6}},
  year         = {{2021}},
}

@article{25212,
  abstract     = {{Finding a good query plan is key to the optimization of query runtime. This holds in particular for cost-based federation engines, which make use of cardinality estimations to achieve this goal. A number of studies compare SPARQL federation engines across different performance metrics, including query runtime, result set completeness and correctness, number of sources selected and number of requests sent. Albeit informative, these metrics are generic and unable to quantify and evaluate the accuracy of the cardinality estimators of cost-based federation engines. To evaluate cost-based federation engines thoroughly, the effect of cardinality estimation errors on the overall query runtime performance must be measured. In this paper, we address this challenge by presenting novel evaluation metrics targeted at a fine-grained benchmarking of cost-based federated SPARQL query engines. We evaluate five cost-based federated SPARQL query engines using existing as well as novel evaluation metrics on LargeRDFBench queries. Our results provide a detailed analysis of the experimental outcomes and reveal novel insights that are useful for the development of future cost-based federated SPARQL query processing engines.}},
  author       = {{Qudus, Umair and Saleem, Muhammad and Ngonga Ngomo, Axel-Cyrille and Lee, Young-Koo}},
  issn         = {{2210-4968}},
  journal      = {{Semantic Web}},
  keywords     = {{SPARQL, benchmarking, cost-based, cost-free, federated, querying}},
  number       = {{6}},
  pages        = {{843--868}},
  publisher    = {{IOS Press}},
  title        = {{{An Empirical Evaluation of Cost-based Federated SPARQL Query Processing Engines}}},
  doi          = {{10.3233/SW-200420}},
  volume       = {{12}},
  year         = {{2021}},
}

@inproceedings{19606,
  abstract     = {{Mobile shopping apps have been using Augmented Reality (AR) in recent years to place their products in the environment of the customer. While this is possible with atomic 3D objects, there is still a lack of runtime configuration of 3D object compositions based on user needs and environmental constraints. For this, we previously developed an approach for model-based AR-assisted product configuration based on the concept of Dynamic Software Product Lines. In this demonstration paper, we present the corresponding tool support, ProConAR, in the form of a Product Modeler and a Product Configurator. While the Product Modeler is an Angular web app that splits products (e.g., a table) up into atomic parts (e.g., tabletop, table legs) and saves them within a configuration model, the Product Configurator is an Android client that uses the configuration model to place different product configurations within the environment of the customer. We show technical details of our ready-to-use tool chain ProConAR by describing its implementation and usage as well as pointing out future research directions.}},
  author       = {{Gottschalk, Sebastian and Yigitbas, Enes and Schmidt, Eugen and Engels, Gregor}},
  booktitle    = {{Human-Centered Software Engineering. HCSE 2020}},
  editor       = {{Bernhaupt, Regina and Ardito, Carmelo and Sauer, Stefan}},
  keywords     = {{Product Configuration, Augmented Reality, Model-based, Tool Support}},
  location     = {{Eindhoven}},
  publisher    = {{Springer}},
  title        = {{{ProConAR: A Tool Support for Model-based AR Product Configuration}}},
  doi          = {{10.1007/978-3-030-64266-2_14}},
  volume       = {{12481}},
  year         = {{2020}},
}

@inbook{21542,
  abstract     = {{Using near-field (NF) scan data to predict the far-field (FF) behaviour of radiating electronic systems represents a novel method to accompany the whole RF design process. This approach uses the so-called Huygens' box as an efficient radiation model inside an electromagnetic (EM) simulation tool and then transforms the scanned NF measurement data into the FF. For this, the basic idea of the Huygens' box principle and the NF-to-FF transformation are briefly presented. The NF is measured on the Huygens' box around a device under test using an NF scanner, recording the magnitude and phase of the site-related magnetic and electric components. A comparison between a full-wave simulation and the measurement results shows a good similarity in both the NF and the simulated and transformed FF. Thus, this method is applicable to predict the FF behaviour of any electronic system by measuring the NF. With this knowledge, the RF design can be improved, allowing a significant reduction of EM compatibility failures at the end of the development flow. In addition, the very efficient FF radiation model can be used for detailed investigations in various environments, and the impact of such an equivalent radiation source on other electronic systems can be assessed.}},
  author       = {{Schröder, Dominik and Lange, Sven and Hangmann, Christian and Hedayat, Christian}},
  booktitle    = {{Tensorial Analysis of Networks (TAN) Modelling for PCB Signal Integrity and EMC Analysis}},
  isbn         = {{9781839530494}},
  keywords     = {{Huygens' box, NF-to-FF transformation, efficient FF radiation model, FF behaviour, EMI assessment, PCB, near-field measurements, efficient radiation model, far-field behaviour, RF design process, far-field prediction, Huygens' box principle, full-wave simulation, electronic system radiation, equivalent radiation source, electromagnetic simulation tool, near-field scan data, EM compatibility failure reduction}},
  pages        = {{315--346 (32)}},
  publisher    = {{The Institution of Engineering and Technology (IET)}},
  title        = {{{Far-field prediction combining simulations with near-field measurements for EMI assessment of PCBs}}},
  doi          = {{10.1049/pbcs072e_ch14}},
  year         = {{2020}},
}

@inproceedings{16933,
  abstract     = {{Continuously innovating its business models is an important task for a company that wants to stay competitive. During this process, the company has to validate various hypotheses about its business models by adapting effectively and efficiently to uncertain and changing customer needs. This adaptation, in turn, can be supported by the concept of Software Product Lines (SPLs). SPLs reduce the time to market by deriving products for customers with changing requirements from a common set of features, structured as a feature model. Analogously, we support the process of business model adaptation by applying the engineering process of SPLs to the structure of the Business Model Canvas (BMC). We call this concept a Business Model Decision Line (BMDL). The BMDL matches business domain knowledge in the form of a feature model with customer needs to derive hypotheses about the business model, together with experiments for their validation. Our approach is effective in providing a comprehensive overview of possible business model adaptations and efficient in reusing experiments for different hypotheses. We implement our approach in a tool and illustrate its usefulness with an example of developing business models for a mobile application.}},
  author       = {{Gottschalk, Sebastian and Rittmeier, Florian and Engels, Gregor}},
  booktitle    = {{Proceedings of the 22nd IEEE International Conference on Business Informatics}},
  keywords     = {{Business Model Decision Line, Business Model Adaptation, Hypothesis-driven Adaptation, Software Product Line, Feature Model}},
  location     = {{Antwerp}},
  publisher    = {{IEEE}},
  title        = {{{Hypothesis-driven Adaptation of Business Models based on Product Line Engineering}}},
  doi          = {{10.1109/CBI49978.2020.00022}},
  year         = {{2020}},
}

@inproceedings{16934,
  abstract     = {{To build successful products, developers have to adapt their product features and business models to uncertain customer needs. This adaptation is part of the research discipline of Hypotheses Engineering (HE), where customer needs can be seen as hypotheses that need to be tested iteratively by conducting experiments together with the customer. So far, modeling support and the associated traceability of this iterative process are missing. Both, in turn, are important to document the adaptation to customer needs and to identify the experiments that provide the most evidence for those needs. To address this issue, we introduce a model-based HE approach with a twofold contribution: First, we develop a modeling language that models hypotheses and experiments as interrelated hierarchies together with a mapping between them. While the hypotheses are labeled with a score level of their current evidence, the experiments are labeled with a score level of the maximum evidence that can be achieved by conducting them. Second, we provide an iterative process to determine the experiments that offer the greatest evidence improvement for the modeled hypotheses. We illustrate the usefulness of the approach with an example of testing the business model of a mobile application.}},
  author       = {{Gottschalk, Sebastian and Yigitbas, Enes and Engels, Gregor}},
  booktitle    = {{Business Modeling and Software Design}},
  editor       = {{Shishkov, Boris}},
  keywords     = {{Hypothesis Engineering, Model-based, Customer Need Adaptation, Business Model, Product Features}},
  location     = {{Potsdam}},
  pages        = {{276--286}},
  publisher    = {{Springer International Publishing}},
  title        = {{{Model-based Hypothesis Engineering for Supporting Adaptation to Uncertain Customer Needs}}},
  doi          = {{10.1007/978-3-030-52306-0_18}},
  volume       = {{391}},
  year         = {{2020}},
}

@techreport{17019,
  abstract     = {{The scientific impact of research papers is multi-dimensional and can be determined quantitatively by means of citation analysis and qualitatively by means of content analysis. Accounting for the widely acknowledged limitations of pure citation analysis, we adopt a knowledge-based perspective on scientific impact to develop a methodology for content-based citation analysis, which allows determining how papers have enabled knowledge development in subsequent research (knowledge impact). As knowledge development differs between research genres, we develop a new knowledge-based citation analysis methodology for the genre of standalone literature reviews (LRs). We apply the suggested methodology to the IS business value domain by manually coding 22 LRs and 1,228 citing papers (CPs) and show that the results challenge the assumption that citations indicate knowledge impact. We derive implications for distinguishing knowledge impact from citation impact in the LR genre. Finally, we develop recommendations for authors of LRs, scientific evaluation committees and editorial boards of journals on how to apply and benefit from the suggested methodology, and we discuss its efficiency and automation.}},
  author       = {{Schryen, Guido and Wagner, Gerit and Benlian, Alexander}},
  keywords     = {{Scientific impact, knowledge impact, content-based citation analysis, methodology}},
  title        = {{{Distinguishing Knowledge Impact from Citation Impact: A Methodology for Analysing Knowledge Impact for the Literature Review Genre}}},
  year         = {{2020}},
}

@inproceedings{15580,
  abstract     = {{This paper deals with aspect phrase extraction and classification in sentiment analysis. We summarize current approaches and datasets from the domain of aspect-based sentiment analysis, which detects sentiments expressed for individual aspects in unstructured text data. So far, mainly commercial user reviews for products or services such as restaurants have been investigated. Here, we present our dataset consisting of German physician reviews, a sensitive and linguistically complex field. Furthermore, we describe the annotation process of a dataset for supervised learning with neural networks. Moreover, we introduce our model for extracting and classifying aspect phrases in one step, which obtains an F1-score of 80%. Applied to this more complex domain, our approach and results outperform previous approaches.}},
  author       = {{Kersting, Joschka and Geierhos, Michaela}},
  booktitle    = {{Proceedings of the 12th International Conference on Agents and Artificial Intelligence (ICAART 2020) --  Special Session on Natural Language Processing in Artificial Intelligence (NLPinAI 2020)}},
  keywords     = {{Deep Learning, Natural Language Processing, Aspect-based Sentiment Analysis}},
  location     = {{Valletta, Malta}},
  pages        = {{391--400}},
  publisher    = {{SCITEPRESS}},
  title        = {{{Aspect Phrase Extraction in Sentiment Analysis with Deep Learning}}},
  year         = {{2020}},
}

