@article{54548,
  author       = {{Prager, Raphael Patrick and Trautmann, Heike}},
  journal      = {{IEEE Transactions on Evolutionary Computation}},
  keywords     = {{Optimization, Evolutionary computation, Benchmark testing, Hyperparameter optimization, Portfolios, Extraterrestrial measurements, Dispersion, Exploratory landscape analysis, mixed-variable problem, mixed search spaces, automated algorithm selection}},
  pages        = {{1--1}},
  title        = {{{Exploratory Landscape Analysis for Mixed-Variable Problems}}},
  doi          = {{10.1109/TEVC.2024.3399560}},
  year         = {{2024}},
}

@inproceedings{47522,
  abstract     = {{Artificial benchmark functions are commonly used in optimization research because of their ability to rapidly evaluate potential solutions, making them a preferred substitute for real-world problems. However, these benchmark functions have faced criticism for their limited resemblance to real-world problems. In response, recent research has focused on automatically generating new benchmark functions for areas where established test suites are inadequate. These approaches have limitations, such as the difficulty of generating new benchmark functions that exhibit exploratory landscape analysis (ELA) features beyond those of existing benchmarks. The objective of this work is to develop a method for generating benchmark functions for single-objective continuous optimization with user-specified structural properties. Specifically, we aim to demonstrate a proof of concept for a method that uses an ELA feature vector to specify these properties in advance. To achieve this, we begin by generating a random sample of decision space variables and objective values. We then adjust the objective values using CMA-ES until the corresponding features of our new problem match the predefined ELA features within a specified threshold. By iteratively transforming the landscape in this way, we ensure that the resulting function exhibits the desired properties. To create the final function, we use the resulting point cloud as training data for a simple neural network that produces a function exhibiting the target ELA features. We demonstrate the effectiveness of this approach by replicating the existing functions of the well-known BBOB suite and creating new functions with ELA feature values that are not present in BBOB.}},
  author       = {{Prager, Raphael Patrick and Dietrich, Konstantin and Schneider, Lennart and Schäpermeier, Lennart and Bischl, Bernd and Kerschke, Pascal and Trautmann, Heike and Mersmann, Olaf}},
  booktitle    = {{Proceedings of the 17th ACM/SIGEVO Conference on Foundations of Genetic Algorithms}},
  isbn         = {{9798400702020}},
  keywords     = {{Benchmarking, Instance Generator, Black-Box Continuous Optimization, Exploratory Landscape Analysis, Neural Networks}},
  pages        = {{129--139}},
  publisher    = {{Association for Computing Machinery}},
  title        = {{{Neural Networks as Black-Box Benchmark Functions Optimized for Exploratory Landscape Features}}},
  doi          = {{10.1145/3594805.3607136}},
  year         = {{2023}},
}

@inproceedings{46396,
  abstract     = {{The steady supply of new optimization methods makes the algorithm selection problem (ASP) an increasingly pressing and challenging task, especially for real-world black-box optimization problems. The introduced approach considers the ASP as a cost-sensitive classification task which is based on Exploratory Landscape Analysis. Low-level features gathered by systematic sampling of the function on the feasible set are used to predict a well-performing algorithm out of a given portfolio. Example-specific label costs are defined by the expected runtime of each candidate algorithm. We use one-sided support vector regression to solve this learning problem. The approach is illustrated by means of the optimization problems and algorithms of the BBOB’09/10 workshop.}},
  author       = {{Bischl, Bernd and Mersmann, Olaf and Trautmann, Heike and Preuss, Mike}},
  booktitle    = {{Proceedings of the 14th Annual Conference on Genetic and Evolutionary Computation}},
  isbn         = {{9781450311779}},
  keywords     = {{machine learning, exploratory landscape analysis, fitness landscape, benchmarking, BBOB test set, algorithm selection}},
  pages        = {{313--320}},
  publisher    = {{Association for Computing Machinery}},
  title        = {{{Algorithm Selection Based on Exploratory Landscape Analysis and Cost-Sensitive Learning}}},
  doi          = {{10.1145/2330163.2330209}},
  year         = {{2012}},
}

@inproceedings{46401,
  abstract     = {{Exploratory Landscape Analysis subsumes a number of techniques employed to obtain knowledge about the properties of an unknown optimization problem, especially insofar as these properties are important for the performance of optimization algorithms. Where in a first attempt, one could rely on high-level features designed by experts, we approach the problem from a different angle here, namely by using relatively cheap low-level computer generated features. Interestingly, very few features are needed to separate the BBOB problem groups and also for relating a problem to high-level, expert designed features, paving the way for automatic algorithm selection.}},
  author       = {{Mersmann, Olaf and Bischl, Bernd and Trautmann, Heike and Preuss, Mike and Weihs, Claus and Rudolph, Günter}},
  booktitle    = {{Proceedings of the 13th Annual Conference on Genetic and Evolutionary Computation}},
  isbn         = {{9781450305570}},
  keywords     = {{exploratory landscape analysis, evolutionary optimization, fitness landscape, benchmarking, BBOB test set}},
  pages        = {{829--836}},
  publisher    = {{Association for Computing Machinery}},
  title        = {{{Exploratory Landscape Analysis}}},
  doi          = {{10.1145/2001576.2001690}},
  year         = {{2011}},
}

