@book{2791,
  editor       = {Kundisch, Dennis and Suhl, Leena and Beckmann, Lars},
  location     = {Paderborn},
  title        = {Tagungsband {MKWI} 2014 - {Multikonferenz} {Wirtschaftsinformatik}},
  year         = {2014},
}

@inbook{1124,
  abstract     = {Finding information about people in the World Wide Web is one of the most common activities of Internet users. It is now impossible to manually analyze all this information and new approaches are needed that are capable of processing the large-scale heterogeneous data in order to extract the pertinent information. The Wikipedia community still puts much effort in manually adding structured data to biographical articles, the so-called Persondata template. Thanks to this kind of metadata, semantically-enriched information concerning the biographee (e.g. name, date of birth, place of birth) can be extracted and processed by search engines. But it is a rather time-consuming task and users quite often forget to add this template: some biographies contain persondata, others do not. There is considerably less work done on developing approaches to automatically enhance English Wikipedia biographies with persondata and therefore improve the quality of structured user contributions. Within this paper, we describe our method to automatically generate persondata from biographical information in Wikipedia articles.},
  author       = {Geierhos, Michaela},
  booktitle    = {Penser le Lexique-Grammaire},
  editor       = {Kakoyianni-Doa, Fryni},
  isbn         = {9782745325129},
  location     = {Nicosia, Cyprus},
  pages        = {411--420},
  publisher    = {Honoré Champion},
  title        = {Towards a Local Grammar-based {Persondata} Generator for {Wikipedia} Biographies},
  year         = {2014},
}

@inproceedings{1130,
  abstract     = {In this paper, we focus on the acronym representation, the concept of abbreviation of major terminology. To this end, we try to find the most efficient method to disambiguate the sense of the acronym. Comparing the various feature types, we found that using single noun (NN) overwhelmingly outperformed noun phrase (NP) base. Moreover, the result also showed that collocation information (CL) was not efficient for enhancing performance considering a huge extra data processing. We expect to apply the open knowledge base Wikipedia to scholarly service to enhance the quality of the local knowledge base and to develop value-added services.},
  author       = {Jeong, Do-Heon and Gim, Jangwon and Jung, Hanmin and Geierhos, Michaela and Bäumer, Frederik Simon},
  booktitle    = {Conference Proceedings of the 9th Asia Pacific International Conference on Information Science and Technology ({APIC-IST} 2014)},
  issn         = {2093-0542},
  location     = {Kathmandu, Nepal},
  pages        = {369--371},
  title        = {Comparative Study on Disambiguating Acronyms in the Scientific Papers Using the Open Knowledge Base},
  year         = {2014},
}

@inproceedings{1131,
  author       = {Geierhos, Michaela and Schulze, Sabine},
  booktitle    = {Challenges for Consumer Research and Consumer Policy in Europe},
  location     = {Bonn, Germany},
  pages        = {53},
  title        = {The Same but Not the Same - Challenges in Comparing Patient Opinions},
  year         = {2014},
}

@inproceedings{1133,
  author       = {Geierhos, Michaela and Siri, Jasmin},
  booktitle    = {Tagungsband Forschungsethik in der qualitativen und quantitativen Sozialforschung},
  location     = {Munich, Germany},
  pages        = {29},
  title        = {Was beobachtet die {Forschungsethik}? {Eine} interdisziplinäre {Diskussion} zwischen {Computerlinguistik} und qualitativ-konstruktivistischer {Sozialforschung}},
  year         = {2014},
}

@inproceedings{1134,
  abstract     = {This paper focuses on the first step in combining prescriptive analytics with scenario techniques in order to provide strategic development after the use of InSciTe, a data prescriptive analytics application. InSciTe supports the improvement of researchers' individual performance by recommending new research directions. Standardized influential factors are presented as a foundation for automated scenario modelling such as the prototypical report generation function of InSciTe. Additionally, a use-case is shown which validates the potential of the standardized influential factors for raw scenario development.},
  author       = {Weber, Jens and Minhee, Cho and Lee, Mikyoung and Song, Sa-kwang and Geierhos, Michaela and Jung, Hanmin},
  booktitle    = {Proceedings of the First International Workshop on Patent Mining and Its Applications ({IPaMin} 2014) co-located with {Konvens} 2014},
  editor       = {Jung, Hanmin and Mandl, Thomas and Womser-Hacker, Christa and Xu, Shuo},
  issn         = {1613-0073},
  keywords     = {Standardized Influential Factors, Prescriptive Analytics, Role Model Group, Scenario Technique},
  location     = {Hildesheim, Germany},
  publisher    = {CEUR-WS.org},
  title        = {System Thinking: Crafting Scenarios for Prescriptive Analytics},
  volume       = {1292},
  year         = {2014},
}

@inproceedings{1135,
  abstract     = {In this paper, we describe our system developed for the GErman SenTiment AnaLysis shared Task (GESTALT) for participation in the Maintask 2: Subjective Phrase and Aspect Extraction from Product Reviews. We present a tool, which identifies subjective and aspect phrases in German product reviews. For the recognition of subjective phrases, we pursue a lexicon-based approach. For the extraction of aspect phrases from the reviews, we consider two possible ways: Besides the subjectivity and aspect look-up, we also implemented a method to establish which subjective phrase belongs to which aspect. The system achieves better results for the recognition of aspect phrases than for the subjective identification.},
  author       = {Dollmann, Markus and Geierhos, Michaela},
  booktitle    = {Workshop Proceedings of the 12th Edition of the {KONVENS} Conference},
  editor       = {Faaß, Gertrud and Ruppenhofer, Josef},
  isbn         = {978-3-934105-47-8},
  keywords     = {corpus linguistics, sentiment analysis},
  location     = {Hildesheim, Germany},
  pages        = {185--191},
  publisher    = {Universitätsverlag Hildesheim},
  title        = {{SentiBA}: Lexicon-based Sentiment Analysis on {German} Product Reviews},
  year         = {2014},
}

@inproceedings{1137,
  abstract     = {In this paper, we present a system which makes scientific data available following the linked open data principle using standards like RDF and URI as well as the popular D2R server (D2R) and the customizable D2RQ mapping language. Our scientific data sets include acronym data and expansions, as well as researcher data such as author name, affiliation, coauthors, and abstracts. The system can easily be extended to other records. Regarding this, a domain adaptation to patent mining seems possible. For this reason, obvious similarities and differences are presented here. The data set is collected from several different providers like publishing houses and digital libraries, which follow different standards in data format and structure. Most of them are not supporting semantic web technologies, but the legacy HTML standard. The integration of these large amounts of scientific data into the Semantic Web is challenging and it needs flexible data structures to access this information and interlink them. Based on these data sets, we will be able to derive a general technology trend as well as the individual research domain for each researcher. The goal of our Linked Open Data System for scientific data is to provide access to this data set for other researchers using the Web of Linked Data. Furthermore we implemented an application for visualization, which allows us to explore the relations between single data sets.},
  author       = {Bäumer, Frederik Simon and Gim, Jangwon and Jeong, Do-Heon and Geierhos, Michaela and Jung, Hanmin},
  booktitle    = {Proceedings of the First International Workshop on Patent Mining and Its Applications ({IPaMin} 2014) co-located with {Konvens} 2014},
  editor       = {Jung, Hanmin and Mandl, Thomas and Womser-Hacker, Christa and Xu, Shuo},
  issn         = {1613-0073},
  keywords     = {Linked Open Data, Researcher Data, Acronym Data, D2R},
  location     = {Hildesheim, Germany},
  publisher    = {CEUR-WS.org},
  title        = {Linked Open Data System for Scientific Data Sets},
  volume       = {1292},
  year         = {2014},
}

@inproceedings{1140,
  abstract     = {Customized planning, engineering and build-up of factory plants are very complex tasks, where project management contains lots of risks and uncertainties. Existing simulation techniques could help massively to evaluate these uncertainties and achieve improved and at least more robust plans during project management, but are typically not applied in industry, especially at SMEs (small and medium-sized enterprises). This paper presents some results of the joint research project simject of the Universities of Paderborn and Kassel, which aims at the development of a demonstrator for a simulation-based and logistic-integrated project planning and scheduling. Based on the researched state-of-the-art, requirements and a planning process are derived and described, as well as a draft of the current technical infrastructure of the intended modular prototype. First plug-ins for project simulation and multi-project optimization are implemented and already show possible benefits for the project management process.},
  author       = {Gutfeld, Thomas and Jessen, Ulrich and Wenzel, Sigrid and Weber, Jens},
  booktitle    = {Proceedings of the 2014 Winter Simulation Conference},
  editor       = {Tolk, Andreas and Diallo, Saikou Y. and Ryzhov, Ilya O. and Yilmaz, Levent and Buckley, Stephen J. and Miller, John A.},
  isbn         = {9781479974863},
  location     = {Savannah, GA, USA},
  pages        = {3423--3434},
  publisher    = {IEEE Press},
  title        = {A Technical Concept for Plant Engineering by Simulation-Based and Logistic-Integrated Project Management},
  doi          = {10.1109/WSC.2014.7020175},
  year         = {2014},
}

@article{20944,
  abstract     = {Joining metals using electrochemical support (ECUF) is a new process for cold pressure welding sheets and parts. This new process is based on an electrochemical in-line surface treatment followed by incremental pilger rolling. The ECUF process intends to cold pressure weld materials under optimized conditions. Oxide layers on metal surfaces are known to inhibit the formation of cold pressure welds. The in-line electrochemical treatment will be used to remove these surface oxides for specific engineering metals and alloys. Hence, an improved pressure weld formation at lower forces and smaller reduction ratios is expected for the electrochemically treated surfaces. Using a more flexible pressure welding process, the number of applications could be greatly improved. First tests with copper were performed to analyse the efficiency of the proposed electrochemical surface treatments. Two electrochemical treatments, the cathodic oxide-reduction and cyclovoltammetric oxide-reduction, were compared with conventional treatments (degreasing and scratch brushing) regarding their influence on the cold pressure welding process of copper. The weld strength of lap welds has been investigated as well as the necessary reduction threshold to form a weld. It was found that the electrochemical oxide reduction resulted in higher weld strength. The results of scanning electron microscopy (SEM) and energy dispersive analysis of X-rays (EDX) indicate that surface oxides were successfully removed by the electrochemical surface treatments. (C) 2014 Elsevier B.V. All rights reserved.},
  author       = {Ebbert, Christoph and Schmidt, H. C. and Rodman, D. and Nuernberger, F. and Homberg, W. and Maier, H. J. and Grundmeier, Guido},
  issn         = {0924-0136},
  journal      = {Journal of Materials Processing Technology},
  number       = {10},
  pages        = {2179--2187},
  title        = {Joining with Electrochemical Support ({ECUF}): Cold Pressure Welding of Copper},
  doi          = {10.1016/j.jmatprotec.2014.04.015},
  volume       = {214},
  year         = {2014},
}

@inproceedings{20972,
  abstract     = {The use of models in requirements engineering (RE) for software-intensive embedded systems is considered beneficial. The main advantages of requirements models as documentation format are that they facilitate requirements understanding and foster automatic analysis techniques. However, natural language (NL) is still the dominant documentation format for requirements specifications, particularly in the domain of embedded systems. This is due to the facts that NL-based requirements can be used within legally binding documents and are more appropriate for reviews than models. In order to bridge the gap between both of these documentation formats, this paper proposes a model-driven RE methodology that makes use of requirements models along with a controlled natural language. The methodology combines the advantages of model-based and NL-based documentation by means of a bidirectional multi-step model transformation between both documentation formats. We illustrate the approach by means of an automotive example, explain the particular steps of the model transformation, and present performance results.},
  author       = {Fockel, Markus and Holtmann, Jörg},
  booktitle    = {2014 {IEEE} 4th International Model-Driven Requirements Engineering Workshop ({MoDRE})},
  isbn         = {9781479963430},
  title        = {A Requirements Engineering Methodology Combining Models and Controlled Natural Language},
  doi          = {10.1109/modre.2014.6890827},
  year         = {2014},
}

@inproceedings{20981,
  abstract     = {Real-time embedded systems (RTES), as in the automotive domain, provide their functionality by executing software operations on hardware with restricted resources and by communicating via buses. The properties of the underlying architecture, i.e., execution times of software operations and bus latencies, cause delays during the provision of the functionality. At the same time, RTES have to fulfill strict real-time requirements. The fulfillment of such real-time requirements under consideration of delays induced by architectural properties should be taken into account already during requirements engineering (RE) to avoid costly iterations in subsequent development phases. In previous work, we developed a formal RE approach based on a recent Live Sequence Chart (LSC) variant, so-called Modal Sequence Diagrams (MSDs). This scenario-based RE approach allows to validate the requirements by means of simulation, i.e., the play-out algorithm originally conceived for LSCs. Our MSD play-out approach considers assumptions on the environment as well as real-time requirements and is applicable to hierarchical component architectures, which makes it well suited for automotive systems. However, delays induced by architectural properties are not considered. In order to consider this important aspect, we introduce in this paper an approach enabling the annotation of software operation execution times and connector latencies to hierarchical component architectures by means of the MARTE profile. These assumptions about the architectural properties can be verified against the real-time requirements specified in the MSDs by means of simulation. We illustrate the approach by means of an example of an automotive RTES.},
  author       = {Holtmann, Jörg and Shipchanov, Dimitar},
  booktitle    = {Proceedings of 12th Workshop Automotive Software Engineering},
  pages        = {2169--2180},
  publisher    = {Bonner Koellen Verlag},
  title        = {Considering Architectural Properties in Real-time Play-out},
  volume       = {P-232},
  year         = {2014},
}

@inbook{20982,
  abstract     = {Real-time software-intensive embedded systems complexity, as in the automotive domain, requires rigorous Requirements Engineering (RE) approaches. Scenario-based RE formalisms like Modal Sequence Diagrams (MSDs) enable an intuitive specification and the simulative validation of functional requirements. However, the dependencies between events occurring in different MSD scenarios are implicit so that it is difficult to find causes of requirements defects, if any. The automotive architecture description language EAST-ADL addresses this problem by relying on event chains, which make dependencies between events explicit. However, EAST-ADL event chains have a low abstraction level, and their relationship to functional requirements has seldom been investigated. Based on the EAST-ADL functional architecture, we propose to use its central notion of event to conciliate both approaches. We conceived an automatic transformation from the high abstraction level requirements specified in MSDs to the low abstraction level event chains.},
  author       = {Koch, Thorsten and Holtmann, Jörg and DeAntoni, Julien},
  booktitle    = {Software Architecture},
  isbn         = {9783319099699},
  issn         = {0302-9743},
  title        = {Generating {EAST-ADL} Event Chains from Scenario-Based Requirements Specifications},
  doi          = {10.1007/978-3-319-09970-5_14},
  year         = {2014},
}

@inproceedings{20983,
  abstract     = {In many areas, such as automotive, healthcare, or production, we find software-intensive systems with complex real-time requirements. To efficiently ensure the quality of these systems, engineers require automated tools for the validation of the requirements throughout the development. This, however, requires that the requirements are specified in an analyzable way. We propose modeling the specification using Modal Sequence Diagrams (MSDs), which express what a system may, must, or must not do in certain situations. MSDs can be executed via the play-out algorithm to investigate the behavior emerging from the interplay of multiple scenarios; we can also test if traces of the final product satisfy all scenarios. In this paper, we present the first tool supporting the play-out of MSDs with real-time constraints. As a case study, we modeled the requirements on gear shifts in an upcoming standard on vehicle testing and use our tool to validate externally generated gear shift sequences.},
  author       = {Brenner, Christian and Greenyer, Joel and Holtmann, Jörg and Liebel, Grischa and Stieglbauer, Gerald and Tichy, Matthias},
  booktitle    = {Proceedings of the 13th International Workshop on Graph Transformation and Visual Modeling Techniques ({GT-VMT} 2014)},
  title        = {{ScenarioTools} Real-Time Play-Out for Test Sequence Validation in an Automotive Case Study},
  year         = {2014},
}

@article{21039,
  author       = {Brecht, Benjamin and Eckstein, Andreas and Ricken, Raimund and Quiring, Viktor and Suche, Hubertus and Sansoni, Linda and Silberhorn, Christine},
  issn         = {1050-2947},
  journal      = {Physical Review A},
  title        = {Demonstration of Coherent Time-Frequency {Schmidt} Mode Selection Using Dispersion-Engineered Frequency Conversion},
  doi          = {10.1103/physreva.90.030302},
  volume       = {90},
  year         = {2014},
}

@article{21040,
  author       = {Krapick, S. and Stefszky, M. S. and Jachura, M. and Brecht, B. and Avenhaus, M. and Silberhorn, C.},
  issn         = {1050-2947},
  journal      = {Physical Review A},
  title        = {Bright Integrated Photon-Pair Source for Practical Passive Decoy-State Quantum Key Distribution},
  doi          = {10.1103/physreva.89.012329},
  volume       = {89},
  year         = {2014},
}

@inbook{21211,
  author       = {Herzig, Bardo and Aßmann, Sandra and Klar, Tilman-Mathies},
  booktitle    = {Grundbildung Medien in pädagogischen Studiengängen},
  editor       = {Imort, Peter and Niesyto, Horst},
  pages        = {65--80},
  publisher    = {kopaed},
  title        = {{Grundbildung} {Medien} im {Profilstudium} im {Lehramt}},
  year         = {2014},
}

@inbook{21357,
  author       = {Mirbabaie, Milad and Ehnis, Christian and Stieglitz, Stefan and Bunker, Deborah},
  booktitle    = {Information systems and global assemblages: (re)configuring actors, artefacts, organizations: {IFIP} {WG} 8.2 Working Conference on Information Systems},
  editor       = {Doolin, Bill and Lamprou, Eleni and Mitev, Nathalie and McLeod, Laurie},
  location     = {Auckland, New Zealand},
  pages        = {207--218},
  publisher    = {Springer},
  title        = {Communication Roles in Public Events: A Case Study on {Twitter} Communications},
  year         = {2014},
}

@inproceedings{21358,
  author       = {Stieglitz, Stefan and Brockmann, Tobias and Mirbabaie, Milad},
  booktitle    = {Proceedings of the 25th Australasian Conference on Information Systems},
  isbn         = {978-1-927184-26-4},
  location     = {Auckland, New Zealand},
  title        = {How Context Impacts on Media Choice},
  year         = {2014},
}

@inproceedings{21361,
  author       = {Ehnis, Christian and Mirbabaie, Milad and Bunker, Deborah and Stieglitz, Stefan},
  booktitle    = {Proceedings of the 25th Australasian Conference on Information Systems},
  isbn         = {978-1-927184-26-4},
  location     = {Auckland, New Zealand},
  title        = {The Role of Social Media Network Participants in Extreme Events},
  year         = {2014},
}

