@article{54548,
  author        = {Prager, Raphael Patrick and Trautmann, Heike},
  journal       = {{IEEE} Transactions on Evolutionary Computation},
  keywords      = {Optimization, Evolutionary computation, Benchmark testing, Hyperparameter optimization, Portfolios, Extraterrestrial measurements, Dispersion, Exploratory landscape analysis, mixed-variable problem, mixed search spaces, automated algorithm selection},
  pages         = {1--1},
  title         = {Exploratory Landscape Analysis for Mixed-Variable Problems},
  doi           = {10.1109/TEVC.2024.3399560},
  year          = {2024},
  internal-note = {NOTE(review): pages 1--1 looks like an early-access placeholder -- update volume/number/pages once the final version is assigned},
}

@inproceedings{48881,
  abstract     = {Classic automated algorithm selection (AS) for (combinatorial) optimization problems heavily relies on so-called instance features, i.e., numerical characteristics of the problem at hand ideally extracted with computationally low-demanding routines. For the traveling salesperson problem (TSP) a plethora of features have been suggested. Most of these features are, if at all, only normalized imprecisely raising the issue of feature values being strongly affected by the instance size. Such artifacts may have detrimental effects on algorithm selection models. We propose a normalization for two feature groups which stood out in multiple AS studies on the TSP: (a) features based on a minimum spanning tree (MST) and (b) a k-nearest neighbor graph (NNG) transformation of the input instance. To this end we theoretically derive minimum and maximum values for properties of MSTs and k-NNGs of Euclidean graphs. We analyze the differences in feature space between normalized versions of these features and their unnormalized counterparts. Our empirical investigations on various TSP benchmark sets point out that the feature scaling succeeds in eliminating the effect of the instance size. Eventually, a proof-of-concept AS-study shows promising results: models trained with normalized features tend to outperform those trained with the respective vanilla features.},
  author       = {Heins, Jonathan and Bossek, Jakob and Pohl, Janina and Seiler, Moritz and Trautmann, Heike and Kerschke, Pascal},
  booktitle    = {Proceedings of the 16th {ACM/SIGEVO} Conference on Foundations of Genetic Algorithms},
  isbn         = {978-1-4503-8352-3},
  keywords     = {automated algorithm selection, graph theory, instance features, normalization, traveling salesperson problem (TSP)},
  pages        = {1--15},
  publisher    = {Association for Computing Machinery},
  title        = {On the Potential of Normalized {TSP} Features for Automated Algorithm Selection},
  year         = {2021},
}

@inproceedings{48897,
  abstract     = {In this work we focus on the well-known Euclidean Traveling Salesperson Problem (TSP) and two highly competitive inexact heuristic TSP solvers, EAX and LKH, in the context of per-instance algorithm selection (AS). We evolve instances with nodes where the solvers show strongly different performance profiles. These instances serve as a basis for an exploratory study on the identification of well-discriminating problem characteristics (features). Our results in a nutshell: we show that even though (1) promising features exist, (2) these are in line with previous results from the literature, and (3) models trained with these features are more accurate than models adopting sophisticated feature selection methods, the advantage is not close to the virtual best solver in terms of penalized average runtime and so is the performance gain over the single best solver. However, we show that a feature-free deep neural network based approach solely based on visual representation of the instances already matches classical AS model results and thus shows huge potential for future studies.},
  author       = {Seiler, Moritz and Pohl, Janina and Bossek, Jakob and Kerschke, Pascal and Trautmann, Heike},
  booktitle    = {Parallel Problem Solving from {Nature} ({PPSN} XVI)},
  isbn         = {978-3-030-58111-4},
  keywords     = {Automated algorithm selection, Deep learning, Feature-based approaches, Traveling Salesperson Problem},
  pages        = {48--64},
  publisher    = {Springer-Verlag},
  title        = {Deep Learning as a Competitive Feature-Free Approach for Automated Algorithm Selection on the Traveling Salesperson Problem},
  doi          = {10.1007/978-3-030-58112-1_4},
  year         = {2020},
}

@article{48884,
  abstract     = {The Travelling Salesperson Problem (TSP) is one of the best-studied NP-hard problems. Over the years, many different solution approaches and solvers have been developed. For the first time, we directly compare five state-of-the-art inexact solvers\textemdash namely, LKH, EAX, restart variants of those, and MAOS\textemdash on a large set of well-known benchmark instances and demonstrate complementary performance, in that different instances may be solved most effectively by different algorithms. We leverage this complementarity to build an algorithm selector, which selects the best TSP solver on a per-instance basis and thus achieves significantly improved performance compared to the single best solver, representing an advance in the state of the art in solving the Euclidean TSP. Our in-depth analysis of the selectors provides insight into what drives this performance improvement.},
  author       = {Kerschke, Pascal and Kotthoff, Lars and Bossek, Jakob and Hoos, Holger H. and Trautmann, Heike},
  issn         = {1063-6560},
  journal      = {Evolutionary Computation},
  keywords     = {automated algorithm selection, machine learning, performance modeling, Travelling Salesperson Problem},
  number       = {4},
  pages        = {597--620},
  title        = {Leveraging {TSP} Solver Complementarity through Machine Learning},
  doi          = {10.1162/evco_a_00215},
  volume       = {26},
  year         = {2018},
}

