@inproceedings{10232,
  abstract     = {{Existing tools for automated machine learning, such as Auto-WEKA, TPOT, auto-sklearn, and more recently ML-Plan, have shown impressive results for the tasks of single-label classification and regression. Yet, there is little work so far on other types of machine learning problems. In particular, there is almost no work on automating the engineering of machine learning solutions for multi-label classification (MLC). We show how the scope of ML-Plan, an AutoML tool for multi-class classification, can be extended towards MLC using MEKA, which is a multi-label extension of the well-known Java library WEKA. The resulting approach recursively refines MEKA's multi-label classifiers, nesting other multi-label classifiers for meta algorithms and single-label classifiers provided by WEKA as base learners. In our evaluation, we find that the proposed approach yields strong results and performs significantly better than the set of baselines we compare with.}},
  author       = {{Wever, Marcel Dominik and Mohr, Felix and Tornede, Alexander and Hüllermeier, Eyke}},
  location     = {{Long Beach, CA, USA}},
  title        = {{{Automating Multi-Label Classification Extending ML-Plan}}},
  year         = {{2019}},
}
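The entry above describes a recursive search space: MEKA meta algorithms nest further multi-label classifiers, and plain MEKA reductions wrap WEKA single-label base learners. The Python sketch below illustrates only that recursive-refinement idea; the name lists and the sampler are illustrative assumptions, not MEKA's API or ML-Plan's actual code.

    import random

    # Hypothetical name lists standing in for MEKA/WEKA algorithm catalogs.
    META_ML = ["meka.BaggingML", "meka.SubsetMapper"]       # nest another MLC
    PLAIN_ML = ["meka.BR", "meka.CC", "meka.LP"]            # wrap a base learner
    SINGLE_LABEL = ["weka.J48", "weka.SMO", "weka.RandomForest"]

    def random_mlc(depth=0, max_depth=2):
        # Recursively refine a multi-label classifier specification: either a
        # meta algorithm nesting a further MLC, or a reduction over a WEKA learner.
        if depth < max_depth and random.random() < 0.5:
            return (random.choice(META_ML), random_mlc(depth + 1, max_depth))
        return (random.choice(PLAIN_ML), random.choice(SINGLE_LABEL))

    random.seed(0)
    print(random_mlc())  # e.g. ('meka.BR', 'weka.SMO') or a nested tuple
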

@article{20243,
  author       = {{Rohlfing, Katharina and Leonardi, Giuseppe and Nomikou, Iris and Rączaszek-Leonardi, Joanna and Hüllermeier, Eyke}},
  journal      = {{IEEE Transactions on Cognitive and Developmental Systems}},
  title        = {{{Multimodal Turn-Taking: Motivations, Methodological Challenges, and Novel Approaches}}},
  doi          = {{10.1109/TCDS.2019.2892991}},
  year         = {{2019}},
}

@inproceedings{2479,
  author       = {{Mohr, Felix and Wever, Marcel Dominik and Hüllermeier, Eyke and Faez, Amin}},
  booktitle    = {{Proceedings of the IEEE International Conference on Services Computing (SCC)}},
  location     = {{San Francisco, CA, USA}},
  publisher    = {{IEEE}},
  title        = {{{(WIP) Towards the Automated Composition of Machine Learning Services}}},
  doi          = {{10.1109/SCC.2018.00039}},
  year         = {{2018}},
}

@inproceedings{2857,
  author       = {{Mohr, Felix and Lettmann, Theodor and Hüllermeier, Eyke and Wever, Marcel Dominik}},
  booktitle    = {{Proceedings of the 1st ICAPS Workshop on Hierarchical Planning}},
  location     = {{Delft, Netherlands}},
  pages        = {{31--39}},
  publisher    = {{AAAI}},
  title        = {{{Programmatic Task Network Planning}}},
  year         = {{2018}},
}

@inproceedings{2471,
  author       = {{Mohr, Felix and Wever, Marcel Dominik and Hüllermeier, Eyke}},
  booktitle    = {{Proceedings of the IEEE International Conference on Services Computing (SCC)}},
  location     = {{San Francisco, CA, USA}},
  publisher    = {{IEEE}},
  title        = {{{On-The-Fly Service Construction with Prototypes}}},
  doi          = {{10.1109/SCC.2018.00036}},
  year         = {{2018}},
}

@article{3402,
  abstract     = {{In machine learning, so-called nested dichotomies are utilized as a reduction technique, i.e., to decompose a multi-class classification problem into a set of binary problems, which are solved using a simple binary classifier as a base learner. The performance of the (multi-class) classifier thus produced strongly depends on the structure of the decomposition. In this paper, we conduct an empirical study, in which we compare existing heuristics for selecting a suitable structure in the form of a nested dichotomy. Moreover, we propose two additional heuristics as natural completions. One of them is the Best-of-K heuristic, which picks the (presumably) best among K randomly generated nested dichotomies. Surprisingly, and in spite of its simplicity, it turns out to outperform the state of the art.}},
  author       = {{Melnikov, Vitalik and Hüllermeier, Eyke}},
  issn         = {{1573-0565}},
  journal      = {{Machine Learning}},
  title        = {{{On the effectiveness of heuristics for learning nested dichotomies: an empirical analysis}}},
  doi          = {{10.1007/s10994-018-5733-1}},
  year         = {{2018}},
}
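The Best-of-K heuristic described in the abstract above is simple enough to sketch end to end: sample K random nested dichotomies, train one binary model per internal node, and keep the dichotomy with the best validation accuracy. The sketch below uses scikit-learn; all concrete choices (logistic regression, the digits dataset, K = 5) are illustrative assumptions, not the paper's experimental setup.

    import random
    import numpy as np
    from sklearn.datasets import load_digits
    from sklearn.linear_model import LogisticRegression
    from sklearn.model_selection import train_test_split

    def random_dichotomy(classes):
        # Recursively bisect the class set into a random binary tree.
        if len(classes) == 1:
            return classes[0]
        shuffled = random.sample(list(classes), len(classes))
        cut = random.randint(1, len(shuffled) - 1)
        return (random_dichotomy(shuffled[:cut]), random_dichotomy(shuffled[cut:]))

    def leaves(node):
        return leaves(node[0]) + leaves(node[1]) if isinstance(node, tuple) else [node]

    def fit(node, X, y):
        # Train one binary classifier per internal node of the dichotomy.
        if not isinstance(node, tuple):
            return None
        left, right = set(leaves(node[0])), set(leaves(node[1]))
        mask = np.isin(y, list(left | right))
        target = np.isin(y[mask], list(left)).astype(int)
        clf = LogisticRegression(max_iter=1000).fit(X[mask], target)
        return {"clf": clf, "l": fit(node[0], X, y), "r": fit(node[1], X, y)}

    def predict_one(node, model, x):
        # Route the instance from the root to a leaf (= predicted class).
        while isinstance(node, tuple):
            go_left = model["clf"].predict(x.reshape(1, -1))[0] == 1
            node, model = (node[0], model["l"]) if go_left else (node[1], model["r"])
        return node

    X, y = load_digits(return_X_y=True)
    X_tr, X_val, y_tr, y_val = train_test_split(X, y, random_state=0)
    random.seed(0)
    best_acc, best_nd = -1.0, None
    for _ in range(5):  # K = 5 randomly generated nested dichotomies
        nd = random_dichotomy(np.unique(y_tr))
        model = fit(nd, X_tr, y_tr)
        acc = np.mean([predict_one(nd, model, x) == t for x, t in zip(X_val, y_val)])
        if acc > best_acc:
            best_acc, best_nd = acc, nd
    print(f"Best-of-5 validation accuracy: {best_acc:.3f}")
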

@article{3510,
  abstract     = {{Automated machine learning (AutoML) seeks to automatically select, compose, and parametrize machine learning algorithms, so as to achieve optimal performance on a given task (dataset). Although current approaches to AutoML have already produced impressive results, the field is still far from mature, and new techniques are still being developed. In this paper, we present ML-Plan, a new approach to AutoML based on hierarchical planning. To highlight the potential of this approach, we compare ML-Plan to the state-of-the-art frameworks Auto-WEKA, auto-sklearn, and TPOT. In an extensive series of experiments, we show that ML-Plan is highly competitive and often outperforms existing approaches.}},
  author       = {{Mohr, Felix and Wever, Marcel Dominik and Hüllermeier, Eyke}},
  issn         = {{1573-0565}},
  journal      = {{Machine Learning}},
  keywords     = {{AutoML, Hierarchical Planning, HTN planning, ML-Plan}},
  location     = {{Dublin, Ireland}},
  pages        = {{1495--1515}},
  publisher    = {{Springer}},
  title        = {{{ML-Plan: Automated Machine Learning via Hierarchical Planning}}},
  doi          = {{10.1007/s10994-018-5735-z}},
  year         = {{2018}},
}
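ML-Plan casts pipeline construction as hierarchical (HTN-style) planning: open tasks such as "choose a preprocessor" are refined step by step, and partial solutions are scored via random completions in a best-first search. The toy below mimics only that control flow with scikit-learn; the two-step space, the particular estimators, and the single random completion per candidate are simplifying assumptions, not ML-Plan itself.

    import random
    from sklearn.datasets import load_iris
    from sklearn.decomposition import PCA
    from sklearn.model_selection import cross_val_score
    from sklearn.neighbors import KNeighborsClassifier
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn.tree import DecisionTreeClassifier

    PREPROCESSORS = [None, StandardScaler(), PCA(n_components=2)]
    CLASSIFIERS = [KNeighborsClassifier(), DecisionTreeClassifier(random_state=0)]

    def score(prep, clf, X, y):
        # Cross-validated score of a concrete (fully refined) pipeline.
        steps = [s for s in (prep, clf) if s is not None]
        return cross_val_score(make_pipeline(*steps), X, y, cv=3).mean()

    X, y = load_iris(return_X_y=True)
    random.seed(0)
    # Refine the first open task ("choose preprocessor"); each choice is scored
    # by a random completion of the remaining task, echoing ML-Plan's node evaluation.
    best_prep = max(PREPROCESSORS, key=lambda p: score(p, random.choice(CLASSIFIERS), X, y))
    # Then refine the remaining task given the committed choice.
    best_clf = max(CLASSIFIERS, key=lambda c: score(best_prep, c, X, y))
    print(type(best_prep).__name__ if best_prep else "no-op", type(best_clf).__name__)
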

@inproceedings{3552,
  author       = {{Mohr, Felix and Wever, Marcel Dominik and Hüllermeier, Eyke}},
  booktitle    = {{Proceedings of the Symposium on Intelligent Data Analysis}},
  location     = {{’s-Hertogenbosch, the Netherlands}},
  title        = {{{Reduction Stumps for Multi-Class Classification}}},
  doi          = {{10.1007/978-3-030-01768-2_19}},
  year         = {{2018}},
}

@inproceedings{3852,
  abstract     = {{In automated machine learning (AutoML), the process of engineering machine learning applications with respect to a specific problem is (partially) automated. Various AutoML tools have already been introduced to provide out-of-the-box machine learning functionality. More specifically, by selecting machine learning algorithms and optimizing their hyperparameters, these tools produce a machine learning pipeline tailored to the problem at hand. Except for TPOT, all of these tools restrict the maximum number of processing steps of such a pipeline. However, as TPOT follows an evolutionary approach, it suffers from performance issues when dealing with larger datasets. In this paper, we present an alternative approach leveraging hierarchical planning to configure machine learning pipelines that are unlimited in length. We evaluate our approach and find its performance to be competitive with other AutoML tools, including TPOT.}},
  author       = {{Wever, Marcel Dominik and Mohr, Felix and Hüllermeier, Eyke}},
  booktitle    = {{ICML 2018 AutoML Workshop}},
  keywords     = {{automated machine learning, complex pipelines, hierarchical planning}},
  location     = {{Stockholm, Sweden}},
  title        = {{{ML-Plan for Unlimited-Length Machine Learning Pipelines}}},
  year         = {{2018}},
}
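The lifting of the length restriction mentioned in the abstract above stems from a recursive refinement rule, roughly Pipeline -> Classifier | Preprocessor then Pipeline, which hierarchical planning can expand to any depth. A minimal sketch with hypothetical step names:

    import random

    PREPROCESSORS = ["impute", "scale", "pca", "select-features"]  # hypothetical
    CLASSIFIERS = ["svm", "random-forest", "naive-bayes"]          # hypothetical

    def sample_pipeline(p_extend=0.5):
        # Hierarchical rule: either close the pipeline with a classifier, or
        # prepend one more preprocessing step and refine the rest recursively,
        # so no upper bound on pipeline length is ever imposed.
        if random.random() < p_extend:
            return [random.choice(PREPROCESSORS)] + sample_pipeline(p_extend)
        return [random.choice(CLASSIFIERS)]

    random.seed(1)
    for _ in range(3):
        print(" -> ".join(sample_pipeline()))
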

@inproceedings{2109,
  abstract     = {{In multinomial classification, reduction techniques are commonly used to decompose the original learning problem into several simpler problems. For example, by recursively bisecting the original set of classes, so-called nested dichotomies define a set of binary classification problems that are organized in the structure of a binary tree. In contrast to the existing one-shot heuristics for constructing nested dichotomies and motivated by recent work on algorithm configuration, we propose a genetic algorithm for optimizing the structure of such dichotomies. A key component of this approach is the proposed genetic representation that facilitates the application of standard genetic operators, while still supporting the exchange of partial solutions under recombination. We evaluate the approach in an extensive experimental study, showing that it yields classifiers with superior generalization performance.}},
  author       = {{Wever, Marcel Dominik and Mohr, Felix and Hüllermeier, Eyke}},
  booktitle    = {{Proceedings of the Genetic and Evolutionary Computation Conference (GECCO 2018)}},
  keywords     = {{Classification, Hierarchical Decomposition, Indirect Encoding}},
  location     = {{Kyoto, Japan}},
  publisher    = {{ACM}},
  title        = {{{Ensembles of Evolved Nested Dichotomies for Classification}}},
  doi          = {{10.1145/3205455.3205562}},
  year         = {{2018}},
}
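The key ingredient named in the abstract above is a genetic representation on which standard operators remain valid. One simple indirect encoding with this property (chosen here purely for illustration; not necessarily the paper's encoding) is a class permutation decoded into a nested dichotomy by recursive halving, so order crossover and swap mutation always yield well-formed trees:

    import random

    def decode(perm):
        # Decode a class permutation into a nested dichotomy by recursive halving.
        if len(perm) == 1:
            return perm[0]
        mid = len(perm) // 2
        return (decode(perm[:mid]), decode(perm[mid:]))

    def swap_mutation(perm):
        i, j = random.sample(range(len(perm)), 2)
        child = list(perm)
        child[i], child[j] = child[j], child[i]
        return child

    def order_crossover(a, b):
        # Keep a slice of parent a; fill the remaining positions in parent b's order,
        # so contiguous blocks (roughly, subtrees) are exchanged as partial solutions.
        i, j = sorted(random.sample(range(len(a)), 2))
        kept = a[i:j]
        rest = [g for g in b if g not in kept]
        return rest[:i] + kept + rest[i:]

    random.seed(0)
    classes = list(range(6))
    parents = [random.sample(classes, len(classes)) for _ in range(2)]
    child = swap_mutation(order_crossover(*parents))
    print(child, "->", decode(child))
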

@unpublished{17713,
  author       = {{Wever, Marcel Dominik and Mohr, Felix and Hüllermeier, Eyke}},
  publisher    = {{arXiv}},
  title        = {{{Automated Multi-Label Classification based on ML-Plan}}},
  year         = {{2018}},
}

@unpublished{17714,
  author       = {{Mohr, Felix and Wever, Marcel Dominik and Hüllermeier, Eyke}},
  title        = {{{Automated machine learning service composition}}},
  year         = {{2018}},
}

@inbook{6423,
  author       = {{Schäfer, Dirk and Hüllermeier, Eyke}},
  booktitle    = {{Discovery Science}},
  isbn         = {{9783030017705}},
  issn         = {{0302-9743}},
  pages        = {{161--175}},
  publisher    = {{Springer International Publishing}},
  title        = {{{Preference-Based Reinforcement Learning Using Dyad Ranking}}},
  doi          = {{10.1007/978-3-030-01771-2_11}},
  year         = {{2018}},
}

@proceedings{10591,
  editor       = {{Abiteboul, Serge and Arenas, Marcelo and Barceló, Pablo and Bienvenu, Meghyn and Calvanese, Diego and David, Claire and Hull, Richard and Hüllermeier, Eyke and Kimelfeld, Benny and Libkin, Leonid and Martens, Wim and Milo, Tova and Murlak, Filip and Neven, Frank and Ortiz, Magdalena and Schwentick, Thomas and Stoyanovich, Julia and Su, Jianwen and Suciu, Dan and Vianu, Victor and Yi, Ke}},
  number       = {{1}},
  pages        = {{1--29}},
  title        = {{{Research Directions for Principles of Data Management}}},
  volume       = {{7}},
  year         = {{2018}},
}

@inbook{10783,
  author       = {{Couso, Ines and Hüllermeier, Eyke}},
  booktitle    = {{Frontiers in Computational Intelligence}},
  editor       = {{Mostaghim, Sanaz and Nürnberger, Andreas and Borgelt, Christian}},
  pages        = {{31--46}},
  publisher    = {{Springer}},
  title        = {{{Statistical Inference for Incomplete Ranking Data: A Comparison of Two Likelihood-Based Estimators}}},
  year         = {{2018}},
}

@article{16038,
  author       = {{Schäfer, Dirk and Hüllermeier, Eyke}},
  journal      = {{Machine Learning}},
  number       = {{5}},
  pages        = {{903--941}},
  title        = {{{Dyad ranking using Plackett-Luce models based on joint feature representations}}},
  volume       = {{107}},
  year         = {{2018}},
}

@inproceedings{10145,
  author       = {{Ahmadi Fahandar, Mohsen and Hüllermeier, Eyke}},
  booktitle    = {{Proceedings of the 32nd AAAI Conference on Artificial Intelligence (AAAI)}},
  pages        = {{2951--2958}},
  title        = {{{Learning to Rank Based on Analogical Reasoning}}},
  year         = {{2018}},
}

@inproceedings{10148,
  author       = {{El Mesaoudi-Paul, Adil and Hüllermeier, Eyke and Busa-Fekete, Robert}},
  booktitle    = {{Proceedings of the 35th International Conference on Machine Learning (ICML)}},
  pages        = {{3469--3477}},
  publisher    = {{PMLR}},
  title        = {{{Ranking Distributions based on Noisy Sorting}}},
  year         = {{2018}},
}

@inproceedings{10149,
  author       = {{Hesse, M. and Timmermann, J. and Hüllermeier, Eyke and Trächtler, Ansgar}},
  booktitle    = {{Proceedings of the 4th International Conference on System-Integrated Intelligence: Intelligent, Flexible and Connected Systems in Products and Production (Procedia Manufacturing 24)}},
  pages        = {{15--20}},
  title        = {{{A Reinforcement Learning Strategy for the Swing-Up of the Double Pendulum on a Cart}}},
  year         = {{2018}},
}

@inbook{10152,
  author       = {{Loza Mencía, Eneldo and Fürnkranz, Johannes and Hüllermeier, Eyke and Rapp, Michael}},
  booktitle    = {{Explainable and Interpretable Models in Computer Vision and Machine Learning}},
  editor       = {{Escalante, Hugo Jair and Escalera, Sergio and Guyon, Isabelle and Baró, Xavier and Güçlütürk, Yağmur and Güçlü, Umut and van Gerven, Marcel A. J.}},
  pages        = {{81--113}},
  publisher    = {{Springer}},
  title        = {{{Learning interpretable rules for multi-label classification}}},
  year         = {{2018}},
}

