@inbook{3632,
  author       = {{Gruber, Hans and Harteis, Christian}},
  booktitle    = {{The SAGE Handbook of Workplace Learning}},
  editor       = {{Malloch, Marg and Cairns, Len and O'Connor, Bridget}},
  pages        = {{224--235}},
  publisher    = {{Sage}},
  title        = {{{Researching workplace learning in Europe}}},
  year         = {{2011}},
}

@book{28933,
  author       = {{Davis, Niki and Eickelmann, Birgit and Patru, Mariana and Schulz-Zander, Renate and Dzvimbo, Peter}},
  publisher    = {{UNESCO/Kennisnet}},
  title        = {{{EduSummIT 2011: Restructuring educational systems to move into the digital age}}},
  year         = {{2011}},
}

@inproceedings{3137,
  author       = {{Hofheinz, Dennis and Jager, Tibor and Kiltz, Eike}},
  booktitle    = {{Advances in Cryptology - ASIACRYPT 2011 - 17th International Conference on the Theory and Application of Cryptology and Information Security, Seoul, South Korea, December 4-8, 2011. Proceedings}},
  pages        = {{647--666}},
  title        = {{{Short Signatures from Weaker Assumptions}}},
  doi          = {{10.1007/978-3-642-25385-0_35}},
  year         = {{2011}},
}

@article{27744,
  abstract     = {{<jats:p>The aim of the present study was to examine the association of pre-pubertal dietary energy density (ED) with both age and body fatness at the start of the pubertal growth spurt (age at take-off, ATO). Analyses included 219 DOrtmund Nutritional and Anthropometric Longitudinally Designed Study participants with sufficient height measurements to estimate ATO who provided 3 d weighed dietary records at baseline, i.e. 2 and 3 years before ATO (mean age 6·9 (<jats:sc>sd</jats:sc> 1·2) years). Mean energy intakes and amounts of foods/drinks consumed at baseline were derived from the records. ED (kJ/g) was calculated based on (1) all foods and drinks (ED_all), (2) foods and energy-containing drinks (ED_energy), (3) foods and milk as a drink, but no other beverages (ED_milk) and (4) foods only, solid or liquid (ED_food). Using multiple regression analyses, the association between the ED variables and ATO was investigated. Furthermore, <jats:italic>Z</jats:italic>-scores of BMI and fat mass index (FMI) at ATO were considered as outcomes to reflect body fatness at puberty onset. The results showed that ED at baseline was not associated with ATO, regardless of the ED method used. For example, mean ATO in the lowest <jats:italic>v.</jats:italic> highest tertile of ED_food was 9·3 (95 % CI 9·0, 9·5) <jats:italic>v.</jats:italic> 9·4 (95 % CI 9·1, 9·7) years, <jats:italic>P</jats:italic><jats:sub>trend</jats:sub> = 0·8 (adjusted for sex, maternal age, birth weight, dietary protein, dietary fibre, baseline BMI <jats:italic>Z</jats:italic>-score). Similarly, ED was not independently associated with BMI or FMI <jats:italic>Z</jats:italic>-score at ATO (<jats:italic>P</jats:italic><jats:sub>trend</jats:sub> = 0·3–0·9). In conclusion, dietary ED in childhood did not influence timing or body fatness at ATO in this cohort of healthy, free-living children.</jats:p>}},
  author       = {{Günther, Anke L. B. and Stahl, Lisa J. and Buyken, Anette and Kroke, Anja}},
  issn         = {{0007-1145}},
  journal      = {{British Journal of Nutrition}},
  pages        = {{345--349}},
  title        = {{{Association of dietary energy density in childhood with age and body fatness at the onset of the pubertal growth spurt}}},
  doi          = {{10.1017/s0007114511001772}},
  year         = {{2011}},
}

@inproceedings{17318,
  author       = {{Brennecke, Andreas and Oevel, Gudrun and Strothmann, A.}},
  booktitle    = {{4. DFN-Forum Kommunikationstechnologien – Beiträge der Fachtagung 20./21.6.2011 Bonn}},
  editor       = {{Müller, P. and Neumair, B. and Dreo Rodosek, G.}},
  location     = {{Bonn}},
  pages        = {{69--78}},
  publisher    = {{Gesellschaft für Informatik}},
  title        = {{{Vom Studiolo zur virtuellen Forschungsumgebung}}},
  volume       = {{187}},
  year         = {{2011}},
}

@inproceedings{1895,
  author       = {{Kniesburges, Sebastian and Koutsopoulos, Andreas and Scheideler, Christian}},
  booktitle    = {{SPAA 2011: Proceedings of the 23rd Annual ACM Symposium on Parallelism in Algorithms and Architectures, San Jose, CA, USA, June 4-6, 2011 (Co-located with FCRC 2011)}},
  isbn         = {{978-1-4503-0743-7}},
  pages        = {{235--244}},
  title        = {{{Re-Chord: a self-stabilizing chord overlay network}}},
  doi          = {{10.1145/1989493.1989527}},
  year         = {{2011}},
}

@inbook{19273,
  author       = {{Eke, Norbert Otto}},
  booktitle    = {{Terror und Form (Limbus. Australisches Jahrbuch für germanistische Literatur- und Kulturwissenschaft. Band 4)}},
  editor       = {{Deiters, Franz-Josef and others}},
  pages        = {{13--28}},
  publisher    = {{Rombach}},
  title        = {{{Spiegel/Bilder – Werner Fritschs Anatomie von Terror und Krieg}}},
  year         = {{2011}},
}

@inproceedings{22363,
  author       = {{Priesterjahn, Claudia and Sondermann-Wölke, Christoph and Tichy, Matthias and Hölscher, Christian}},
  booktitle    = {{14th IEEE International Symposium on Object/Component/Service-Oriented Real-Time Distributed Computing Workshops}},
  isbn         = {{978-1-4577-0303-4}},
  pages        = {{80--87}},
  publisher    = {{IEEE Computer Society}},
  title        = {{{Component-Based Hazard Analysis for Mechatronic Systems}}},
  doi          = {{10.1109/ISORCW.2011.19}},
  volume       = {{14}},
  year         = {{2011}},
}

@article{22938,
  author       = {{Gausemeier, Jürgen and Dumitrescu, Roman and Kahl, Sascha and Nordsiek, Daniel}},
  journal      = {{Robotics and Computer Integrated Manufacturing}},
  number       = {{4}},
  pages        = {{772--778}},
  title        = {{{Integrative Development of Product and Production System}}},
  volume       = {{27}},
  year         = {{2011}},
}

@inproceedings{800,
  author       = {{Biermann, Thorsten and Scalia, Luca and Karl, Holger}},
  booktitle    = {{Proceedings of the 14th International Symposium on Modeling Analysis and Simulation of Wireless and Mobile Systems, MSWiM 2011, Miami, Florida, USA, October 31 - November 4, 2011}},
  pages        = {{265--274}},
  title        = {{{Designing optical metro and access networks for future cooperative cellular systems}}},
  doi          = {{10.1145/2068897.2068945}},
  year         = {{2011}},
}

@misc{643,
  author       = {{Welp, Daniel}},
  publisher    = {{Universität Paderborn}},
  title        = {{{User-space Scheduling for Heterogeneous System under Linux}}},
  year         = {{2011}},
}

@inproceedings{645,
  abstract     = {{In the standard consensus problem there are n processes with possibly different input values and the goal is to eventually reach a point at which all processes commit to exactly one of these values. We are studying a slight variant of the consensus problem called the stabilizing consensus problem [2]. In this problem, we do not require that each process commits to a final value at some point, but that eventually they arrive at a common, stable value without necessarily being aware of that. This should work irrespective of the states in which the processes are starting. Our main result is a simple randomized algorithm called median rule that, with high probability, just needs O(log m log log n + log n) time and work per process to arrive at an almost stable consensus for any set of m legal values as long as an adversary can corrupt the states of at most $\sqrt{n}$ processes at any time. Without adversarial involvement, just O(log n) time and work is needed for a stable consensus, with high probability. As a by-product, we obtain a simple distributed algorithm for approximating the median of n numbers in time O(log m log log n + log n) under adversarial presence.}},
  author       = {{Doerr, Benjamin and Goldberg, Leslie Ann and Minder, Lorenz and Sauerwald, Thomas and Scheideler, Christian}},
  booktitle    = {{Proceedings of the 23rd ACM Symposium on Parallelism in Algorithms and Architectures (SPAA)}},
  pages        = {{149--158}},
  title        = {{{Stabilizing consensus with the power of two choices}}},
  doi          = {{10.1145/1989493.1989516}},
  year         = {{2011}},
}

@article{7354,
  abstract     = {{Acceptance testing is a time-consuming task for complex software systems that have to fulfill a large number of requirements. To reduce this effort, we have developed a widely automated method for deriving test plans from requirements that are expressed in natural language. It consists of three stages: annotation, clustering, and test plan specification. The general idea is to exploit redundancies and implicit relationships in requirements specifications. Multi-viewpoint techniques based on RM-ODP (Reference Model for Open Distributed Processing) are employed for specifying the requirements. We then use linguistic analysis techniques, requirements clustering algorithms, and pattern-based requirements collection to reduce the total effort of testing against the requirements specification. In particular, we use linguistic analysis for extracting and annotating the actor, process and object of a requirements statement. During clustering, a similarity function is computed as a measure for the overlap of requirements. In the test plan specification stage, our approach provides capabilities for semi-automatically deriving test plans and acceptance criteria from the clustered informal textual requirements. Two patterns are applied to compute a suitable order of test activities. The generated test plans consist of a sequence of test steps and asserts that are executed or checked in the given order. We also present the supporting prototype tool TORC, which is available open source. For the evaluation of the approach, we have conducted a case study in the field of acceptance testing of a national electronic identification system. In summary, we report on lessons learned how linguistic analysis and clustering techniques can help testers in understanding the relations between requirements and for improving test planning.}},
  author       = {{Güldali, Baris and Funke, Holger and Sauer, Stefan and Engels, Gregor}},
  issn         = {{0963-9314}},
  journal      = {{Software Quality Journal}},
  number       = {{4}},
  pages        = {{771--799}},
  publisher    = {{Springer Nature}},
  title        = {{{TORC: test plan optimization by requirements clustering}}},
  doi          = {{10.1007/s11219-011-9149-4}},
  volume       = {{19}},
  year         = {{2011}},
}

@article{4379,
  abstract     = {{Uniform mesoporous Si double layers are formed on 4 inch p-type <100> wafers with an off orientation of 6° towards <111> by means of electrochemical etching in ethanoic-based HF electrolytes. These substrates are of interest for the epitaxial growth of III–V compound semiconductor stacks on their top for the production of multi-junction solar cells and very thin electronic devices. We demonstrate transfer of porous layers after an annealing process in hydrogen atmosphere. Electron Back-Scatter Diffraction analysis confirms that the substrate orientation is conserved during the etching and annealing steps. Confocal μ-Raman spectroscopy analysis shows a decrease in the Raman signal intensity after etching and a subsequent increase after annealing while no shift is observed. By means of Atomic Force Microscopy analysis, the surface appearance after the etching and annealing steps can be visualized. The mean surface roughness varies during the process from 0.55 nm for the unprocessed wafers to 0.27 nm after etching and 0.78 nm after annealing. The decrease of average roughness after etching is caused by an electropolishing step prior to porous formation. Despite of slight increase of mean surface roughness after annealing the samples are still appropriate for high quality epitaxial growth and subsequent lift-off.}},
  author       = {{Garralaga Rojas, E. and Terheiden, B. and Plagwitz, H. and Hensen, J. and Wiedemeier, V. and Berth, Gerhard and Zrenner, Artur and Brendel, R.}},
  issn         = {{0040-6090}},
  journal      = {{Thin Solid Films}},
  keywords     = {{Porous Si, Layer transfer, Thin-film, Photovoltaics}},
  number       = {{1}},
  pages        = {{606--609}},
  publisher    = {{Elsevier BV}},
  title        = {{{Lift-off of mesoporous layers by electrochemical etching on Si (100) substrates with miscut of 6° off towards (111)}}},
  doi          = {{10.1016/j.tsf.2011.07.063}},
  volume       = {{520}},
  year         = {{2011}},
}

@inbook{5173,
  abstract     = {{Im Zuge der in den USA durch die exzessive Vergabe von Subprime-Krediten und deren Verbriefung ausgelöste Finanzkrise und der daraus folgenden weltweiten Wirtschaftskrise hat sich gezeigt, dass das bisherige System der Regulierung bzw. Überwachung von Finanzinstitutionen nicht ausreichend ist. Sowohl in den USA als auch in der Europäischen Union wird deshalb intensiv über ein neues System zur Regulierung von Finanzinstituten nachgedacht. Es ist zwar offen-sichtlich, dass eine wesentliche Ursache der Finanzkrise in den USA die Ermunterung der Finanzinstitute durch den Staat zur Vergabe von Immobilienkrediten an nicht ausreichend solvente Konsumenten war. Die vorhandenen Regulierungsinstrumente konnten die Krise jedoch nicht verhindern bzw. ausreichen abmildern. Das vorliegende Papier setzt sich zum Ziel, die Vorschläge zur Reform der Bankenregulierung zu evaluieren und insbesondere auf ihre potenziellen Wirkungen im Hinblick auf den Wettbewerb im Bankensektor zu überprüfen.

Im Zuge der Diskussion über mögliche Lösungen existieren verschiedenste Vorschläge, die von Änderungen in Rechnungslegungsvorschriften über die Neuordnung der nationalen bzw. internationalen Finanzaufsicht bis hin zu einer stärkeren Berücksichtigung systemischer Risiken gehen. Die Auswirkungen der Finanzkrise haben gezeigt, dass die Abkehr von herkömmlichen Rechnungslegungssystemen hin zu Regeln der internationalen Rechnungslegung unter dem Schlagwort „Fair Value Accounting“ durchaus Gefahren in sich bergen kann. Die Debatte dreht sich insbesondere um die Frage, ob die neuen Rechnungslegungsvorschriften maßgeblich dazu beitragen, das regulatorische Eigenkapital einer Bank in Boomphasen zu erhöhen und in konjunkturellen Schwächephasen dagegen zu senken. In der Literatur ist in Hinblick auf diese Frage bislang keine einhellige Auffassung zu identifizieren. Pellens et al. (2009) gelangen zu dem Ergebnis, dass bankenaufsichtsrechtliche Normen an das IFRS-Zahlenwerk gebunden sind und dies für regulatorische Entscheidungen als besonders kritisch zu sehen ist. Durch das Konzept des Fair-Value-Accounting wurde das regulatorische Eigenkapital im Zeitraum von 2003 bis 2007 systematisch erhöht, woraus ein gestiegener Kreditvergabespielraum resultierte. Demgegenüber kommen Laux und Leuz (2009) im Rahmen einer empirischen Studie anhand von Mikrodaten aus den USA zu dem Ergebnis, dass Fair-Value-Accounting nur zum geringen Teil zu den Problemen der US Banken beigetragen hat und es im Rahmen ihrer Studie keine Evidenz für wesentliche Effekte dieses Rechnungslegungsverfahrens auf die massiven Abschreibungen der US Banken gibt.

Beide Studien betonen aber auch, dass weitere empirische Forschung notwendig ist, bevor diese Frage abschließend geklärt werden kann. Die Frage des adäquaten Rechnungslegungssystems zeigt, wie vielfältig die Ursachen der gegenwärtigen Finanzkrise sind und wie schwer tatsächliche kausale Effekte zu identifizieren sind. Im folgenden Abschnitt werden zunächst die ordnungspolitischen Rahmenbedingungen einer angemessenen Bankenregulierung dargestellt. Der dritte Abschnitt umfasst einen kurzen Überblick über den Verlauf der Finanzkrise. In Abschnitt 4 diskutieren wir die wesentlichen Vorschläge zur Optimierung der Regulierung von Finanzinstituten. Abschnitt 5 evaluiert diese Vorschläge auch aus einer wettbewerbsökonomischen Perspektive. Diese Perspektive ist aus unserer Sicht vor allen Dingen notwendig, weil eine großer Teil der Reformdebatte aus Sicht der Finanzmarktstabilität geführt wird und infolgedessen Wettbewerbseffekte vernachlässigt werden. Das Fazit in Abschnitt 6 fasst unsere Ergebnisse zusammen und gibt einen Ausblick auf weiteren Forschungsbedarf aus Sicht der Wettbewerbsökonomie.}},
  author       = {{Haucap, Justus and Heimeshoff, Ulrich and Uhde, André}},
  booktitle    = {{Die aktuelle Finanzkrise: Bestandsaufnahme und Lehren für die Zukunft, Schriften zur Ordnungsfragen der Wirtschaft}},
  editor       = {{Michler, Albrecht F. and Smeets, Heinz D.}},
  isbn         = {{978-3828205383}},
  pages        = {{185--208}},
  publisher    = {{De Gruyter Oldenbourg}},
  title        = {{{Zur Neuregulierung des Bankensektors nach der Finanzkrise: Bewertung der Reformvorhaben der EU aus ordnungspolitischer Sicht}}},
  volume       = {{93}},
  year         = {{2011}},
}

@inproceedings{646,
  abstract     = {{This paper presents a dynamic overlay network based on the De Bruijn graph which we call Linearized De Bruijn (LDB) network. The LDB network has the advantage that it has a guaranteed constant node degree and that the routing between any two nodes takes at most O(log n) hops with high probability. Also, we show that there is a simple local-control algorithm that can recover the LDB network from any network topology that is weakly connected.}},
  author       = {{Richa, Andrea W. and Scheideler, Christian}},
  booktitle    = {{Proceedings of the 13th International Symposium on Stabilization, Safety, and Security of Distributed Systems (SSS)}},
  pages        = {{416--430}},
  title        = {{{Self-Stabilizing DeBruijn Networks}}},
  doi          = {{10.1007/978-3-642-24550-3_31}},
  year         = {{2011}},
}

@inproceedings{657,
  abstract     = {{We present two distributed, constant factor approximation algorithms for the metric facility location problem. Both algorithms have been designed with a strong emphasis on applicability in the area of wireless sensor networks: in order to execute them, each sensor node only requires limited local knowledge and simple computations. Also, the algorithms can cope with measurement errors and take into account that communication costs between sensor nodes do not necessarily increase linearly with the distance, but can be represented by a polynomial. Since it cannot always be expected that sensor nodes execute algorithms in a synchronized way, our algorithms are executed in an asynchronous model (but they are still able to break symmetry that might occur when two neighboring nodes act at exactly the same time). Furthermore, they can deal with dynamic scenarios: if a node moves, the solution is updated and the update affects only nodes in the local neighborhood. Finally, the algorithms are robust in the sense that incorrect behavior of some nodes during some round will, in the end, still result in a good approximation. The first algorithm runs in expected O(log_{1+\epsilon} n) communication rounds and yields a \my^4(1+4\my^2(1+\epsilon)^{1/p})^p approximation, while the second has a running time of expected O(log^2_{1+\epsilon} n) communication rounds and an approximation factor of \my^4(1 + 2(1 + \epsilon)^{1/p})^p. Here, \epsilon > 0 is an arbitrarily small constant, p the exponent of the polynomial representing the communication costs, and \my the relative measurement error.}},
  author       = {{Abshoff, Sebastian and Cord-Landwehr, Andreas and Degener, Bastian and Kempkes, Barbara and Pietrzyk, Peter}},
  booktitle    = {{Proceedings of the 7th International Symposium on Algorithms for Sensor Systems, Wireless Ad Hoc Networks and Autonomous Mobile Entities (ALGOSENSORS)}},
  pages        = {{13--27}},
  title        = {{{Local Approximation Algorithms for the Uncapacitated Metric Facility Location Problem in Power-Aware Sensor Networks}}},
  doi          = {{10.1007/978-3-642-28209-6_3}},
  year         = {{2011}},
}

@misc{659,
  author       = {{Liske, Gennadij}},
  publisher    = {{Universität Paderborn}},
  title        = {{{Fault attacks in pairing-based cryptography}}},
  year         = {{2011}},
}

@inproceedings{664,
  abstract     = {{Web Computing is a variant of parallel computing where the idle times of PCs donated by worldwide distributed users are employed to execute parallel programs. The PUB-Web library developed by us supports this kind of usage of computing resources. A major problem for the efficient execution of such parallel programs is load balancing. In the Web Computing context, this problem becomes more difficult because of the dynamic behavior of the underlying "parallel computer": the set of available processors (donated PCs) as well as their availability (idle times) change over time in an unpredictable fashion.In this paper, we experimentally evaluate and compare load balancing algorithms in this scenario, namely a variant of the well-established Work Stealing algorithm and strategies based on a heterogeneous version of distributed hash-tables (DHHTs) introduced recently. In order to run a meaningful experimental evaluation, we employ, in addition to our Web Computing library PUB-Web, realistic data sets for the job input streams and for the dynamics of the availability of the resources.Our experimental evaluations suggest that Work Stealing is the better strategy if the number of processes ready to run matches the number of available processors. But a suitable variant of DHHTs outperforms Work Stealing if there are significantly more processes ready to run than available processors.}},
  author       = {{Gehweiler, Joachim and Kling, Peter and Meyer auf der Heide, Friedhelm}},
  booktitle    = {{Proceedings of the 9th International Conference on Parallel Processing and Applied Mathematics (PPAM)}},
  pages        = {{31--40}},
  title        = {{{An Experimental Comparison of Load Balancing Strategies in a Web Computing Environment}}},
  doi          = {{10.1007/978-3-642-31500-8_4}},
  year         = {{2011}},
}

@inproceedings{6843,
  author       = {{Wendland, S. and Drobisch, A. and Krauter, Stefan and Grunow, Paul}},
  booktitle    = {{Tagungsband des 26. Symposiums für Photovoltaische Solarenergie, Kloster Banz, Bad Staffelstein (Deutschland), 2.-4. März 2011}},
  title        = {{{Hot-Spot-Untersuchungen in unterschiedlichen Arbeitspunkten bei PV-Modulen}}},
  year         = {{2011}},
}

