@inproceedings{10673,
  author    = {{Ho, Nam and Ahmed, Abdullah Fathi and Kaufmann, Paul and Platzner, Marco}},
  booktitle = {{Proc. NASA/ESA Conf. Adaptive Hardware and Systems (AHS)}},
  keywords  = {{cache storage, field programmable gate arrays, multiprocessing systems, parallel architectures, reconfigurable architectures, FPGA, dynamic reconfiguration, evolvable cache mapping, many-core architecture, memory-to-cache address mapping function, microarchitectural optimization, multicore architecture, nature-inspired optimization, parallelization degrees, processor, reconfigurable cache mapping, reconfigurable computing, Software, Tuning}},
  pages     = {{1--7}},
  title     = {{{Microarchitectural optimization by means of reconfigurable and evolvable cache mappings}}},
  doi       = {{10.1109/AHS.2015.7231178}},
  year      = {{2015}},
}

@inproceedings{48838,
  abstract  = {{The majority of algorithms can be controlled or adjusted by parameters. Their values can substantially affect the algorithms’ performance. Since the manual exploration of the parameter space is tedious – even for few parameters – several automatic procedures for parameter tuning have been proposed. Recent approaches also take into account some characteristic properties of the problem instances, frequently termed instance features. Our contribution is the proposal of a novel concept for feature-based algorithm parameter tuning, which applies an approximating surrogate model for learning the continuous feature-parameter mapping. To accomplish this, we learn a joint model of the algorithm performance based on both the algorithm parameters and the instance features. The required data is gathered using a recently proposed acquisition function for model refinement in surrogate-based optimization: the profile expected improvement. This function provides an avenue for maximizing the information required for the feature-parameter mapping, i.e., the mapping from instance features to the corresponding optimal algorithm parameters. The approach is validated by applying the tuner to exemplary evolutionary algorithms and problems, for which theoretically grounded or heuristically determined feature-parameter mappings are available.}},
  author    = {{Bossek, Jakob and Bischl, Bernd and Wagner, Tobias and Rudolph, Günter}},
  booktitle = {{Proceedings of the Genetic and Evolutionary Computation Conference}},
  isbn      = {{978-1-4503-3472-3}},
  keywords  = {{evolutionary algorithms, model-based optimization, parameter tuning}},
  pages     = {{1319--1326}},
  publisher = {{Association for Computing Machinery}},
  title     = {{{Learning Feature-Parameter Mappings for Parameter Tuning via the Profile Expected Improvement}}},
  doi       = {{10.1145/2739480.2754673}},
  year      = {{2015}},
}