@inproceedings{63703,
  author       = {Hoffbauer, Tilman and Hoos, Holger H. and Bossek, Jakob},
  booktitle    = {AAAI-25, Sponsored by the Association for the Advancement of Artificial Intelligence, February 25 - March 4, 2025, Philadelphia, PA, USA},
  editor       = {Walsh, Toby and Shah, Julie and Kolter, Zico},
  pages        = {17223--17230},
  publisher    = {AAAI Press},
  title        = {{KernelMatmul}: Scaling {Gaussian} Processes to Large Time Series},
  doi          = {10.1609/aaai.v39i16.33893},
  year         = {2025},
}

@inproceedings{63704,
  author       = {Wittner, Dominic and Bossek, Jakob},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference, GECCO 2025, NH Malaga Hotel, Malaga, Spain, July 14-18, 2025},
  editor       = {Filipic, Bogdan},
  pages        = {340--348},
  publisher    = {ACM},
  title        = {Cluster Prevention in Evolutionary Diversity Optimization for Parallel Machine Scheduling},
  doi          = {10.1145/3712256.3726357},
  year         = {2025},
}

@article{59073,
  author       = {Rook, Jeroen G. and Benjamins, Carolin and Bossek, Jakob and Trautmann, Heike and Hoos, Holger H. and Lindauer, Marius},
  issn         = {1063-6560},
  journal      = {Evolutionary Computation},
  pages        = {1--25},
  title        = {{MO-SMAC}: Multi-objective Sequential Model-based Algorithm Configuration},
  doi          = {10.1162/evco_a_00371},
  year         = {2025},
}

@inproceedings{60812,
  author       = {Preuß, Oliver Ludger and Mensendiek, Carolin and Rook, Jeroen and Bossek, Jakob and Trautmann, Heike},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference, GECCO 2025, NH Malaga Hotel, Malaga, Spain, July 14-18, 2025},
  editor       = {Filipic, Bogdan},
  pages        = {58--66},
  publisher    = {ACM},
  title        = {Automated Algorithm Configuration and Systematic Benchmarking for Heterogeneous {MNK}-Landscapes},
  doi          = {10.1145/3712256.3726481},
  year         = {2025},
}

@inproceedings{52749,
  author       = {Seiler, Moritz and Rook, Jeroen and Heins, Jonathan and Preuß, Oliver Ludger and Bossek, Jakob and Trautmann, Heike},
  booktitle    = {2023 IEEE Symposium Series on Computational Intelligence (SSCI)},
  pages        = {361--368},
  publisher    = {IEEE},
  title        = {Using Reinforcement Learning for Per-Instance Algorithm Configuration on the {TSP}},
  doi          = {10.1109/ssci52147.2023.10372008},
  year         = {2024},
  internal-note = {NOTE(review): apparent duplicate of entry 48898 (same title and DOI); year here is 2024 vs 2023 there -- verify and merge},
}

@inproceedings{63706,
  author       = {Schmidbauer, Marcus and Opris, Andre and Bossek, Jakob and Neumann, Frank and Sudholt, Dirk},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference, GECCO 2024, Melbourne, VIC, Australia, July 14-18, 2024},
  editor       = {Li, Xiaodong and Handl, Julia},
  publisher    = {ACM},
  title        = {Guiding Quality Diversity on Monotone Submodular Functions: Customising the Feature Space by Adding {Boolean} Conjunctions},
  doi          = {10.1145/3638529.3654160},
  year         = {2024},
}

@inproceedings{63705,
  author       = {Bossek, Jakob and Grimme, Christian},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference, GECCO 2024, Melbourne, VIC, Australia, July 14-18, 2024},
  editor       = {Li, Xiaodong and Handl, Julia},
  publisher    = {ACM},
  title        = {Generalised {Kruskal} Mutation for the Multi-Objective Minimum Spanning Tree Problem},
  doi          = {10.1145/3638529.3654165},
  year         = {2024},
}

@inproceedings{48869,
  abstract     = {Evolutionary algorithms have been shown to obtain good solutions for complex optimization problems in static and dynamic environments. It is important to understand the behaviour of evolutionary algorithms for complex optimization problems that also involve dynamic and/or stochastic components in a systematic way in order to further increase their applicability to real-world problems. We investigate the node weighted traveling salesperson problem (W-TSP), which provides an abstraction of a wide range of weighted TSP problems, in dynamic settings. In the dynamic setting of the problem, items that have to be collected as part of a TSP tour change over time. We first present a dynamic setup for the dynamic W-TSP parameterized by different types of changes that are applied to the set of items to be collected when traversing the tour. Our first experimental investigations study the impact of such changes on resulting optimized tours in order to provide structural insights of optimization solutions. Afterwards, we investigate simple mutation-based evolutionary algorithms and study the impact of the mutation operators and the use of populations with dealing with the dynamic changes to the node weights of the problem.},
  author       = {Bossek, Jakob and Neumann, Aneta and Neumann, Frank},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference},
  isbn         = {9798400701191},
  keywords     = {dynamic optimization, evolutionary algorithms, re-optimization, weighted traveling salesperson problem},
  pages        = {248--256},
  publisher    = {Association for Computing Machinery},
  title        = {On the Impact of Basic Mutation Operators and Populations within Evolutionary Algorithms for the Dynamic Weighted Traveling Salesperson Problem},
  doi          = {10.1145/3583131.3590384},
  year         = {2023},
}

@inproceedings{48872,
  abstract     = {Quality diversity (QD) is a branch of evolutionary computation that gained increasing interest in recent years. The Map-Elites QD approach defines a feature space, i.e., a partition of the search space, and stores the best solution for each cell of this space. We study a simple QD algorithm in the context of pseudo-Boolean optimisation on the "number of ones" feature space, where the ith cell stores the best solution amongst those with a number of ones in [(i - 1)k, ik - 1]. Here k is a granularity parameter 1 {$\leq$} k {$\leq$} n+1. We give a tight bound on the expected time until all cells are covered for arbitrary fitness functions and for all k and analyse the expected optimisation time of QD on OneMax and other problems whose structure aligns favourably with the feature space. On combinatorial problems we show that QD finds a (1 - 1/e)-approximation when maximising any monotone sub-modular function with a single uniform cardinality constraint efficiently. Defining the feature space as the number of connected components of a connected graph, we show that QD finds a minimum spanning tree in expected polynomial time.},
  author       = {Bossek, Jakob and Sudholt, Dirk},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference},
  isbn         = {9798400701191},
  keywords     = {quality diversity, runtime analysis},
  pages        = {1546--1554},
  publisher    = {Association for Computing Machinery},
  title        = {Runtime Analysis of Quality Diversity Algorithms},
  doi          = {10.1145/3583131.3590383},
  year         = {2023},
}

@inproceedings{48886,
  abstract     = {Generating new instances via evolutionary methods is commonly used to create new benchmarking data-sets, with a focus on attempting to cover an instance-space as completely as possible. Recent approaches have exploited Quality-Diversity methods to evolve sets of instances that are both diverse and discriminatory with respect to a portfolio of solvers, but these methods can be challenging when attempting to find diversity in a high-dimensional feature-space. We address this issue by training a model based on Principal Component Analysis on existing instances to create a low-dimension projection of the high-dimension feature-vectors, and then apply Novelty Search directly in the new low-dimension space. We conduct experiments to evolve diverse and discriminatory instances of Knapsack Problems, comparing the use of Novelty Search in the original feature-space to using Novelty Search in a low-dimensional projection, and repeat over a given set of dimensions. We find that the methods are complementary: if treated as an ensemble, they collectively provide increased coverage of the space. Specifically, searching for novelty in a low-dimension space contributes 56% of the filled regions of the space, while searching directly in the feature-space covers the remaining 44%.},
  author       = {Marrero, Alejandro and Segredo, Eduardo and Hart, Emma and Bossek, Jakob and Neumann, Aneta},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference},
  isbn         = {9798400701191},
  keywords     = {evolutionary computation, instance generation, instance-space analysis, knapsack problem, novelty search},
  pages        = {312--320},
  publisher    = {Association for Computing Machinery},
  title        = {Generating Diverse and Discriminatory Knapsack Instances by Searching for Novelty in Variable Dimensions of Feature-Space},
  doi          = {10.1145/3583131.3590504},
  year         = {2023},
}

@article{48871,
  abstract     = {Most runtime analyses of randomised search heuristics focus on the expected number of function evaluations to find a unique global optimum. We ask a fundamental question: if additional search points are declared optimal, or declared as desirable target points, do these additional optima speed up evolutionary algorithms? More formally, we analyse the expected hitting time of a target set OPT{$\cup$}S where S is a set of non-optimal search points and OPT is the set of optima and compare it to the expected hitting time of OPT. We show that the answer to our question depends on the number and placement of search points in S. For all black-box algorithms and all fitness functions with polynomial expected optimisation times we show that, if additional optima are placed randomly, even an exponential number of optima has a negligible effect on the expected optimisation time. Considering Hamming balls around all global optima gives an easier target for some algorithms and functions and can shift the phase transition with respect to offspring population sizes in the (1,{$\lambda$}) EA on OneMax. However, for the one-dimensional Ising model the time to reach Hamming balls of radius (1/2-{$\epsilon$})n around optima does not reduce the asymptotic expected optimisation time in the worst case. Finally, on functions where search trajectories typically join in a single search point, turning one search point into an optimum drastically reduces the expected optimisation time.},
  author       = {Bossek, Jakob and Sudholt, Dirk},
  issn         = {0304-3975},
  journal      = {Theoretical Computer Science},
  keywords     = {Evolutionary algorithms, pseudo-Boolean functions, runtime analysis},
  pages        = {113757},
  title        = {Do Additional Target Points Speed Up Evolutionary Algorithms?},
  doi          = {10.1016/j.tcs.2023.113757},
  year         = {2023},
}

@article{48859,
  abstract     = {We contribute to the efficient approximation of the Pareto-set for the classical NP-hard multi-objective minimum spanning tree problem (moMST) adopting evolutionary computation. More precisely, by building upon preliminary work, we analyse the neighborhood structure of Pareto-optimal spanning trees and design several highly biased sub-graph-based mutation operators founded on the gained insights. In a nutshell, these operators replace (un)connected sub-trees of candidate solutions with locally optimal sub-trees. The latter (biased) step is realized by applying Kruskal’s single-objective MST algorithm to a weighted sum scalarization of a sub-graph. We prove runtime complexity results for the introduced operators and investigate the desirable Pareto-beneficial property. This property states that mutants cannot be dominated by their parent. Moreover, we perform an extensive experimental benchmark study to showcase the operator’s practical suitability. Our results confirm that the subgraph based operators beat baseline algorithms from the literature even with severely restricted computational budget in terms of function evaluations on four different classes of complete graphs with different shapes of the Pareto-front.},
  author       = {Bossek, Jakob and Grimme, Christian},
  issn         = {1063-6560},
  journal      = {Evolutionary Computation},
  pages        = {1--35},
  title        = {On Single-Objective Sub-Graph-Based Mutation for Solving the Bi-Objective Minimum Spanning Tree Problem},
  doi          = {10.1162/evco_a_00335},
  year         = {2023},
}

@article{46310,
  abstract     = {Classic automated algorithm selection (AS) for (combinatorial) optimization problems heavily relies on so-called instance features, i.e., numerical characteristics of the problem at hand ideally extracted with computationally low-demanding routines. For the traveling salesperson problem (TSP) a plethora of features have been suggested. Most of these features are, if at all, only normalized imprecisely raising the issue of feature values being strongly affected by the instance size. Such artifacts may have detrimental effects on algorithm selection models. We propose a normalization for two feature groups which stood out in multiple AS studies on the TSP: (a) features based on a minimum spanning tree (MST) and (b) nearest neighbor relationships of the input instance. To this end we theoretically derive minimum and maximum values for properties of MSTs and k-nearest neighbor graphs (NNG) of Euclidean graphs. We analyze the differences in feature space between normalized versions of these features and their unnormalized counterparts. Our empirical investigations on various TSP benchmark sets point out that the feature scaling succeeds in eliminating the effect of the instance size. A proof-of-concept AS-study shows promising results: models trained with normalized features tend to outperform those trained with the respective vanilla features.},
  author       = {Heins, Jonathan and Bossek, Jakob and Pohl, Janina and Seiler, Moritz and Trautmann, Heike and Kerschke, Pascal},
  issn         = {0304-3975},
  journal      = {Theoretical Computer Science},
  keywords     = {Feature normalization, Algorithm selection, Traveling salesperson problem},
  pages        = {123--145},
  title        = {A study on the effects of normalized {TSP} features for automated algorithm selection},
  doi          = {10.1016/j.tcs.2022.10.019},
  volume       = {940},
  year         = {2023},
}

@inproceedings{48898,
  abstract     = {Automated Algorithm Configuration (AAC) usually takes a global perspective: it identifies a parameter configuration for an (optimization) algorithm that maximizes a performance metric over a set of instances. However, the optimal choice of parameters strongly depends on the instance at hand and should thus be calculated on a per-instance basis. We explore the potential of Per-Instance Algorithm Configuration (PIAC) by using Reinforcement Learning (RL). To this end, we propose a novel PIAC approach that is based on deep neural networks. We apply it to predict configurations for the Lin\textendash Kernighan heuristic (LKH) for the Traveling Salesperson Problem (TSP) individually for every single instance. To train our PIAC approach, we create a large set of 100000 TSP instances with 2000 nodes each \textemdash currently the largest benchmark set to the best of our knowledge. We compare our approach to the state-of-the-art AAC method Sequential Model-based Algorithm Configuration (SMAC). The results show that our PIAC approach outperforms this baseline on both the newly created instance set and established instance sets.},
  author       = {Seiler, Moritz and Rook, Jeroen and Heins, Jonathan and Preuß, Oliver Ludger and Bossek, Jakob and Trautmann, Heike},
  booktitle    = {2023 IEEE Symposium Series on Computational Intelligence (SSCI)},
  pages        = {361--368},
  publisher    = {IEEE},
  title        = {Using Reinforcement Learning for Per-Instance Algorithm Configuration on the {TSP}},
  doi          = {10.1109/SSCI52147.2023.10372008},
  year         = {2023},
  internal-note = {NOTE(review): apparent duplicate of entry 52749 (same title and DOI); year here is 2023 vs 2024 there -- verify and merge},
}

@inproceedings{48861,
  abstract     = {Generating instances of different properties is key to algorithm selection methods that differentiate between the performance of different solvers for a given combinatorial optimization problem. A wide range of methods using evolutionary computation techniques has been introduced in recent years. With this paper, we contribute to this area of research by providing a new approach based on quality diversity (QD) that is able to explore the whole feature space. QD algorithms allow to create solutions of high quality within a given feature space by splitting it up into boxes and improving solution quality within each box. We use our QD approach for the generation of TSP instances to visualize and analyze the variety of instances differentiating various TSP solvers and compare it to instances generated by established approaches from the literature.},
  author       = {Bossek, Jakob and Neumann, Frank},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference},
  isbn         = {978-1-4503-9237-2},
  keywords     = {instance features, instance generation, quality diversity, TSP},
  pages        = {186--194},
  publisher    = {Association for Computing Machinery},
  title        = {Exploring the Feature Space of {TSP} Instances Using Quality Diversity},
  doi          = {10.1145/3512290.3528851},
  year         = {2022},
}

@inproceedings{48868,
  author       = {Bossek, Jakob and Neumann, Aneta and Neumann, Frank},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference Companion},
  isbn         = {978-1-4503-9268-6},
  pages        = {824--842},
  publisher    = {Association for Computing Machinery},
  title        = {Evolutionary Diversity Optimization for Combinatorial Optimization: Tutorial at {GECCO}’22, Boston, {USA}},
  doi          = {10.1145/3520304.3533626},
  year         = {2022},
}

@inproceedings{48882,
  abstract     = {In multimodal multi-objective optimization (MMMOO), the focus is not solely on convergence in objective space, but rather also on explicitly ensuring diversity in decision space. We illustrate why commonly used diversity measures are not entirely appropriate for this task and propose a sophisticated basin-based evaluation (BBE) method. Also, BBE variants are developed, capturing the anytime behavior of algorithms. The set of BBE measures is tested by means of an algorithm configuration study. We show that these new measures also transfer properties of the well-established hypervolume (HV) indicator to the domain of MMMOO, thus also accounting for objective space convergence. Moreover, we advance MMMOO research by providing insights into the multimodal performance of the considered algorithms. Specifically, algorithms exploiting local structures are shown to outperform classical evolutionary multi-objective optimizers regarding the BBE variants and respective trade-off with HV.},
  author       = {Heins, Jonathan and Rook, Jeroen and Schäpermeier, Lennart and Kerschke, Pascal and Bossek, Jakob and Trautmann, Heike},
  booktitle    = {Parallel Problem Solving from Nature (PPSN XVII)},
  editor       = {Rudolph, Günter and Kononova, Anna V. and Aguirre, Hernán and Kerschke, Pascal and Ochoa, Gabriela and Tu{\v s}ar, Tea},
  isbn         = {978-3-031-14714-2},
  keywords     = {Anytime behavior, Benchmarking, Continuous optimization, Multi-objective optimization, Multimodality, Performance metric},
  pages        = {192--206},
  publisher    = {Springer International Publishing},
  title        = {{BBE}: Basin-Based Evaluation of Multimodal Multi-objective Optimization Problems},
  doi          = {10.1007/978-3-031-14714-2_14},
  year         = {2022},
}

@inproceedings{48894,
  abstract     = {Recently different evolutionary computation approaches have been developed that generate sets of high quality diverse solutions for a given optimisation problem. Many studies have considered diversity 1) as a mean to explore niches in behavioural space (quality diversity) or 2) to increase the structural differences of solutions (evolutionary diversity optimisation). In this study, we introduce a co-evolutionary algorithm to simultaneously explore the two spaces for the multi-component traveling thief problem. The results show the capability of the co-evolutionary algorithm to achieve significantly higher diversity compared to the baseline evolutionary diversity algorithms from the literature.},
  author       = {Nikfarjam, Adel and Neumann, Aneta and Bossek, Jakob and Neumann, Frank},
  booktitle    = {Parallel Problem Solving from Nature (PPSN XVII)},
  editor       = {Rudolph, Günter and Kononova, Anna V. and Aguirre, Hernán and Kerschke, Pascal and Ochoa, Gabriela and Tu{\v s}ar, Tea},
  isbn         = {978-3-031-14714-2},
  keywords     = {Co-evolutionary algorithms, Evolutionary diversity optimisation, Quality diversity, Traveling thief problem},
  pages        = {237--249},
  publisher    = {Springer International Publishing},
  title        = {Co-Evolutionary Diversity Optimisation for the Traveling Thief Problem},
  doi          = {10.1007/978-3-031-14714-2_17},
  year         = {2022},
}

@article{48878,
  abstract     = {Due to the rise of continuous data-generating applications, analyzing data streams has gained increasing attention over the past decades. A core research area in stream data is stream classification, which categorizes or detects data points within an evolving stream of observations. Areas of stream classification are diverse\textemdash ranging, e.g., from monitoring sensor data to analyzing a wide range of (social) media applications. Research in stream classification is related to developing methods that adapt to the changing and potentially volatile data stream. It focuses on individual aspects of the stream classification pipeline, e.g., designing suitable algorithm architectures, an efficient train and test procedure, or detecting so-called concept drifts. As a result of the many different research questions and strands, the field is challenging to grasp, especially for beginners. This survey explores, summarizes, and categorizes work within the domain of stream classification and identifies core research threads over the past few years. It is structured based on the stream classification process to facilitate coordination within this complex topic, including common application scenarios and benchmarking data sets. Thus, both newcomers to the field and experts who want to widen their scope can gain (additional) insight into this research area and find starting points and pointers to more in-depth literature on specific issues and research directions in the field.},
  author       = {Clever, Lena and Pohl, Janina Susanne and Bossek, Jakob and Kerschke, Pascal and Trautmann, Heike},
  issn         = {2076-3417},
  journal      = {Applied Sciences},
  keywords     = {big data, data mining, data stream analysis, machine learning, stream classification, supervised learning},
  number       = {18},
  pages        = {9094},
  publisher    = {Multidisciplinary Digital Publishing Institute},
  title        = {Process-Oriented Stream Classification Pipeline: A Literature Review},
  doi          = {10.3390/app12189094},
  volume       = {12},
  year         = {2022},
}

@inproceedings{48896,
  abstract     = {Hardness of Multi-Objective (MO) continuous optimization problems results from an interplay of various problem characteristics, e. g. the degree of multi-modality. We present a benchmark study of classical and diversity focused optimizers on multi-modal MO problems based on automated algorithm configuration. We show the large effect of the latter and investigate the trade-off between convergence in objective space and diversity in decision space.},
  author       = {Rook, Jeroen and Trautmann, Heike and Bossek, Jakob and Grimme, Christian},
  booktitle    = {Proceedings of the Genetic and Evolutionary Computation Conference Companion},
  isbn         = {978-1-4503-9268-6},
  keywords     = {configuration, multi-modality, multi-objective optimization},
  pages        = {356--359},
  publisher    = {Association for Computing Machinery},
  title        = {On the Potential of Automated Algorithm Configuration on Multi-Modal Multi-Objective Optimization Problems},
  doi          = {10.1145/3520304.3528998},
  year         = {2022},
}

