@inproceedings{63918,
  abstract     = {{Many real-world datasets, such as citation networks, social networks, and molecular structures, are naturally represented as heterogeneous graphs, where nodes belong to different types and have additional features. For example, in a citation network, nodes representing "Paper" or "Author" may include attributes like keywords or affiliations. A critical machine learning task on these graphs is node classification, which is useful for applications such as fake news detection, corporate risk assessment, and molecular property prediction. Although Heterogeneous Graph Neural Networks (HGNNs) perform well in these contexts, their predictions remain opaque. Existing post-hoc explanation methods lack support for actual node features beyond one-hot encoding of node type and often fail to generate realistic, faithful explanations. To address these gaps, we propose DiGNNExplainer, a model-level explanation approach that synthesizes heterogeneous graphs with realistic node features via discrete denoising diffusion. In particular, we generate realistic discrete features (e.g., bag-of-words features) using diffusion models within a discrete space, whereas previous approaches are limited to continuous spaces. We evaluate our approach on multiple datasets and show that DiGNNExplainer produces explanations that are realistic and faithful to the model's decision-making, outperforming state-of-the-art methods.}},
  author       = {{Das, Pallabee and Heindorf, Stefan}},
  booktitle    = {{Proceedings of the ACM Web Conference 2026 (WWW ’26)}},
  location     = {{Dubai, United Arab Emirates}},
  publisher    = {{ACM}},
  title        = {{{Discrete Diffusion-Based Model-Level Explanation of Heterogeneous GNNs with Node Features}}},
  year         = {{2026}},
}

@article{54450,
  abstract     = {{In the last decade, there has been increasing interest in allowing users to understand how the predictions of machine-learned models come about, thus increasing transparency and empowering users to understand and potentially contest those decisions. Dialogue-based approaches, in contrast to traditional one-shot eXplainable Artificial Intelligence (XAI) methods, facilitate interactive, in-depth exploration through multi-turn dialogues, simulating expert conversations. This paper reviews the current state of dialogue-based XAI, presenting a systematic review of 1,339 publications, narrowed down to 14 based on inclusion criteria. We explore theoretical foundations of the systems, propose key dimensions along which different solutions to dialogue-based XAI differ, and identify key use cases, target audiences, system components, and the types of supported queries and responses. Furthermore, we investigate the current paradigms by which systems are evaluated and highlight their key limitations. Key findings include identifying the main use cases, objectives, and audiences targeted by dialogue-based XAI methods, and summarizing the main types of questions and information needs. Beyond discussing avenues for future work, we present a meta-architecture for these systems from existing literature and outline prevalent theoretical frameworks.}},
  author       = {{Mindlin, Dimitry and Beer, Fabian and Sieger, Leonie Nora and Heindorf, Stefan and Cimiano, Philipp and Esposito, Elena and Ngonga Ngomo, Axel-Cyrille}},
  journal      = {{Artificial Intelligence Review}},
  number       = {{3}},
  publisher    = {{Springer}},
  title        = {{{Beyond One-Shot Explanations: A Systematic Literature Review of Dialogue-Based XAI Approaches}}},
  doi          = {{10.1007/s10462-024-11007-7}},
  volume       = {{58}},
  year         = {{2025}},
}

@article{64206,
  author       = {{Groß, Sebastian and Heindorf, Stefan and Terhörst, Philipp}},
  journal      = {{CoRR}},
  title        = {{{A Responsible Face Recognition Approach for Small and Mid-Scale Systems Through Personalized Neural Networks}}},
  doi          = {{10.48550/ARXIV.2505.19920}},
  volume       = {{abs/2505.19920}},
  year         = {{2025}},
}

@inproceedings{62707,
  author       = {{Heindorf, Stefan and Neib, Daniel}},
  booktitle    = {{Proceedings of the 34th ACM International Conference on Information and Knowledge Management}},
  publisher    = {{ACM}},
  title        = {{{Assessing Natural Language Explanations of Relational Graph Neural Networks}}},
  doi          = {{10.1145/3746252.3760918}},
  year         = {{2025}},
}

@inproceedings{54449,
  author       = {{Kouagou, N'Dah Jean and Demir, Caglar and Zahera, Hamada Mohamed Abdelsamee and Wilke, Adrian and Heindorf, Stefan and Li, Jiayi and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{Companion Proceedings of the ACM on Web Conference 2024}},
  location     = {{Singapore}},
  publisher    = {{ACM}},
  title        = {{{Universal Knowledge Graph Embeddings}}},
  doi          = {{10.1145/3589335.3651978}},
  year         = {{2024}},
}

@unpublished{54448,
  abstract     = {{Graph Neural Networks (GNNs) are effective for node classification in
graph-structured data, but they lack explainability, especially at the global
level. Current research mainly utilizes subgraphs of the input as local
explanations or generates new graphs as global explanations. However, these
graph-based methods are limited in their ability to explain classes with
multiple sufficient explanations. To provide more expressive explanations, we
propose utilizing class expressions (CEs) from the field of description logic
(DL). Our approach explains heterogeneous graphs with different types of nodes
using CEs in the EL description logic. To identify the best explanation among
multiple candidate explanations, we employ and compare two different scoring
functions: (1) For a given CE, we construct multiple graphs, have the GNN make
a prediction for each graph, and aggregate the predicted scores. (2) We score
the CE in terms of fidelity, i.e., we compare the predictions of the GNN to the
predictions by the CE on a separate validation set. Instead of subgraph-based
explanations, we offer CE-based explanations.}},
  author       = {{Köhler, Dominik and Heindorf, Stefan}},
  note         = {{arXiv:2405.12654}},
  title        = {{{Utilizing Description Logics for Global Explanations of Heterogeneous Graph Neural Networks}}},
  year         = {{2024}},
}

@inproceedings{52231,
  author       = {{Blübaum, Lukas and Heindorf, Stefan}},
  booktitle    = {{The World Wide Web Conference (WWW)}},
  location     = {{Singapore}},
  pages        = {{2204--2215}},
  publisher    = {{ACM}},
  title        = {{{Causal Question Answering with Reinforcement Learning}}},
  doi          = {{10.1145/3589334.3645610}},
  year         = {{2024}},
}

@inproceedings{56213,
  author       = {{Sapkota, Rupesh and Köhler, Dominik and Heindorf, Stefan}},
  booktitle    = {{Proceedings of the 33rd ACM International Conference on Information and Knowledge Management (CIKM ’24)}},
  location     = {{Boise, Idaho, USA}},
  publisher    = {{ACM}},
  title        = {{{EDGE: Evaluation Framework for Logical vs. Subgraph Explanations for Node Classifiers on Knowledge Graphs}}},
  doi          = {{10.1145/3627673.3679904}},
  year         = {{2024}},
}

@inbook{56214,
  author       = {{Li, Jiayi and Satheesh, Sheetal and Heindorf, Stefan and Moussallem, Diego and Speck, René and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{Communications in Computer and Information Science}},
  isbn         = {{9783031637865}},
  issn         = {{1865-0929}},
  location     = {{Valletta, Malta}},
  publisher    = {{Springer Nature Switzerland}},
  title        = {{{AutoCL: AutoML for Concept Learning}}},
  doi          = {{10.1007/978-3-031-63787-2_7}},
  year         = {{2024}},
}

@inproceedings{55653,
  abstract     = {{We consider the problem of class expression learning using cardinality-minimal sets of examples. Recent class expression learning approaches employ deep neural networks and have demonstrated tremendous performance improvements in execution time and quality of the computed solutions. However, they lack generalization capabilities when it comes to the number of examples used in a learning problem, i.e., they often perform poorly on unseen learning problems where only a few examples are given. In this work, we propose a generalization of the classical class expression learning problem to address the limitations above. In short, our generalized learning problem (GLP) forces learning systems to solve the classical class expression learning problem using the smallest possible subsets of examples, thereby improving the learning systems' ability to solve unseen learning problems with arbitrary numbers of examples. Moreover, we develop ROCES, a learning algorithm for synthesis-based approaches to solve GLP. Experimental results suggest that post training, ROCES outperforms existing synthesis-based approaches on out-of-distribution learning problems while remaining highly competitive overall.}},
  author       = {{Kouagou, N'Dah Jean and Heindorf, Stefan and Demir, Caglar and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{Proceedings of the Thirty-Third International Joint Conference on Artificial Intelligence}},
  publisher    = {{International Joint Conferences on Artificial Intelligence Organization}},
  title        = {{{ROCES: Robust Class Expression Synthesis in Description Logics via Iterative Sampling}}},
  doi          = {{10.24963/ijcai.2024/479}},
  year         = {{2024}},
}

@inbook{46460,
  author       = {{Ngonga Ngomo, Axel-Cyrille and Demir, Caglar and Kouagou, N'Dah Jean and Heindorf, Stefan and Karalis, Nikolaos and Bigerl, Alexander}},
  booktitle    = {{Compendium of Neurosymbolic Artificial Intelligence}},
  pages        = {{272--286}},
  publisher    = {{IOS Press}},
  title        = {{{Class Expression Learning with Multiple Representations}}},
  year         = {{2023}},
}

@inproceedings{46248,
  author       = {{Demir, Caglar and Wiebesiek, Michel and Lu, Renzhong and Ngonga Ngomo, Axel-Cyrille and Heindorf, Stefan}},
  booktitle    = {{ECML PKDD}},
  location     = {{Torino}},
  title        = {{{LitCQD: Multi-Hop Reasoning in Incomplete Knowledge Graphs with Numeric Literals}}},
  year         = {{2023}},
}

@inbook{47421,
  abstract     = {{Class expression learning in description logics has long been regarded as an iterative search problem in an infinite conceptual space. Each iteration of the search process invokes a reasoner and a heuristic function. The reasoner finds the instances of the current expression, and the heuristic function computes the information gain and decides on the next step to be taken. As the size of the background knowledge base grows, search-based approaches for class expression learning become prohibitively slow. Current neural class expression synthesis (NCES) approaches investigate the use of neural networks for class expression learning in the attributive language with complement (ALC). While they show significant improvements over search-based approaches in runtime and quality of the computed solutions, they rely on the availability of pretrained embeddings for the input knowledge base. Moreover, they are not applicable to ontologies in more expressive description logics. In this paper, we propose a novel NCES approach which extends the state of the art to the description logic ALCHIQ(D). Our extension, dubbed NCES2, comes with an improved training data generator and does not require pretrained embeddings for the input knowledge base as both the embedding model and the class expression synthesizer are trained jointly. Empirical results on benchmark datasets suggest that our approach inherits the scalability capability of current NCES instances with the additional advantage that it supports more complex learning problems. NCES2 achieves the highest performance overall when compared to search-based approaches and to its predecessor NCES. We provide our source code, datasets, and pretrained models at https://github.com/dice-group/NCES2.}},
  author       = {{Kouagou, N'Dah Jean and Heindorf, Stefan and Demir, Caglar and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{Machine Learning and Knowledge Discovery in Databases: Research Track}},
  isbn         = {{9783031434204}},
  issn         = {{0302-9743}},
  location     = {{Turin}},
  publisher    = {{Springer Nature Switzerland}},
  title        = {{{Neural Class Expression Synthesis in ALCHIQ(D)}}},
  doi          = {{10.1007/978-3-031-43421-1_12}},
  year         = {{2023}},
}

@unpublished{37937,
  abstract     = {{Knowledge bases are widely used for information management on the web,
enabling high-impact applications such as web search, question answering, and
natural language processing. They also serve as the backbone for automatic
decision systems, e.g. for medical diagnostics and credit scoring. As
stakeholders affected by these decisions would like to understand their
situation and verify fair decisions, a number of explanation approaches have
been proposed using concepts in description logics. However, the learned
concepts can become long and difficult to fathom for non-experts, even when
verbalized. Moreover, long concepts do not immediately provide a clear path of
action to change one's situation. Counterfactuals answering the question "How
must feature values be changed to obtain a different classification?" have been
proposed as short, human-friendly explanations for tabular data. In this paper,
we transfer the notion of counterfactuals to description logics and propose the
first algorithm for generating counterfactual explanations in the description
logic $\mathcal{ELH}$. Counterfactual candidates are generated from concepts
and the candidates with fewest feature changes are selected as counterfactuals.
In case of multiple counterfactuals, we rank them according to the likeliness
of their feature combinations. For evaluation, we conduct a user survey to
investigate which of the generated counterfactual candidates are preferred for
explanation by participants. In a second study, we explore possible use cases
for counterfactual explanations.}},
  author       = {{Sieger, Leonie Nora and Heindorf, Stefan and Blübaum, Lukas and Ngonga Ngomo, Axel-Cyrille}},
  note         = {{arXiv:2301.05109}},
  title        = {{{Explaining ELH Concept Descriptions through Counterfactual Reasoning}}},
  year         = {{2023}},
}

@inproceedings{54612,
  author       = {{Kouagou, N'Dah Jean and Heindorf, Stefan and Demir, Caglar and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{NeSy 2023, 17th International Workshop on Neural-Symbolic Learning and Reasoning, Certosa di Pontignano, Siena, Italy}},
  keywords     = {{318 SFB-TRR demir dice enexa heindorf knowgraphs kouagou ngonga sail}},
  publisher    = {{CEUR-WS}},
  title        = {{{Neural Class Expression Synthesis (Extended Abstract)}}},
  year         = {{2023}},
}

@inproceedings{33734,
  abstract     = {{Many applications require explainable node classification in knowledge graphs. Towards this end, a popular ``white-box'' approach is class expression learning: Given sets of positive and negative nodes, class expressions in description logics are learned that separate positive from negative nodes. Most existing approaches are search-based approaches generating many candidate class expressions and selecting the best one. However, they often take a long time to find suitable class expressions. In this paper, we cast class expression learning as a translation problem and propose a new family of class expression learning approaches which we dub neural class expression synthesizers. Training examples are ``translated'' into class expressions in a fashion akin to machine translation. Consequently, our synthesizers are not subject to the runtime limitations of search-based approaches. We study three instances of this novel family of approaches based on LSTMs, GRUs, and set transformers, respectively. An evaluation of our approach on four benchmark datasets suggests that it can effectively synthesize high-quality class expressions with respect to the input examples in approximately one second on average. Moreover, a comparison to state-of-the-art approaches suggests that we achieve better F-measures on large datasets. For reproducibility purposes, we provide our implementation as well as pretrained models in our public GitHub repository at https://github.com/dice-group/NeuralClassExpressionSynthesis}},
  author       = {{Kouagou, N'Dah Jean and Heindorf, Stefan and Demir, Caglar and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{The Semantic Web - 20th Extended Semantic Web Conference (ESWC 2023)}},
  editor       = {{Pesquita, Catia and Jimenez-Ruiz, Ernesto and McCusker, Jamie and Faria, Daniel and Dragoni, Mauro and Dimou, Anastasia and Troncy, Raphael and Hertling, Sven}},
  keywords     = {{Neural network, Concept learning, Description logics}},
  location     = {{Hersonissos, Crete, Greece}},
  pages        = {{209--226}},
  publisher    = {{Springer International Publishing}},
  title        = {{{Neural Class Expression Synthesis}}},
  doi          = {{10.1007/978-3-031-33455-9_13}},
  volume       = {{13870}},
  year         = {{2023}},
}

@inproceedings{46575,
  author       = {{Baci, Alkid and Heindorf, Stefan}},
  booktitle    = {{CIKM}},
  location     = {{Birmingham, UK}},
  pages        = {{3733--3737}},
  publisher    = {{ACM}},
  title        = {{{Accelerating Concept Learning via Sampling}}},
  doi          = {{10.1145/3583780.3615158}},
  year         = {{2023}},
}

@inproceedings{33957,
  abstract     = {{Manufacturing companies are challenged to make the increasingly complex work processes equally manageable for all employees to prevent an impending loss of competence. In this contribution, an intelligent assistance system is proposed enabling employees to help themselves in the workplace and provide them with competence-related support. This results in increasing the short- and long-term efficiency of problem solving in companies.}},
  author       = {{Deppe, Sahar and Brandt, Lukas and Brünninghaus, Marc and Papenkordt, Jörg and Heindorf, Stefan and Tschirner-Vinke, Gudrun}},
  booktitle    = {{2022 IEEE 27th International Conference on Emerging Technologies and Factory Automation (ETFA)}},
  keywords     = {{Assistance system, Knowledge graph, Information retrieval, Neural networks, AR}},
  location     = {{Stuttgart}},
  title        = {{{AI-Based Assistance System for Manufacturing}}},
  doi          = {{10.1109/ETFA52439.2022.9921520}},
  year         = {{2022}},
}

@inbook{33740,
  author       = {{Kouagou, N'Dah Jean and Heindorf, Stefan and Demir, Caglar and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{The Semantic Web}},
  isbn         = {{9783031069802}},
  issn         = {{0302-9743}},
  publisher    = {{Springer International Publishing}},
  title        = {{{Learning Concept Lengths Accelerates Concept Learning in ALC}}},
  doi          = {{10.1007/978-3-031-06981-9_14}},
  year         = {{2022}},
}

@inproceedings{29290,
  abstract     = {{Classifying nodes in knowledge graphs is an important task, e.g., predicting
missing types of entities, predicting which molecules cause cancer, or
predicting which drugs are promising treatment candidates. While black-box
models often achieve high predictive performance, they are only post-hoc and
locally explainable and do not allow the learned model to be easily enriched
with domain knowledge. Towards this end, learning description logic concepts
from positive and negative examples has been proposed. However, learning such
concepts often takes a long time and state-of-the-art approaches provide
limited support for literal data values, although they are crucial for many
applications. In this paper, we propose EvoLearner - an evolutionary approach
to learn ALCQ(D), which is the attributive language with complement (ALC)
paired with qualified cardinality restrictions (Q) and data properties (D). We
contribute a novel initialization method for the initial population: starting
from positive examples (nodes in the knowledge graph), we perform biased random
walks and translate them to description logic concepts. Moreover, we improve
support for data properties by maximizing information gain when deciding where
to split the data. We show that our approach significantly outperforms the
state of the art on the benchmarking framework SML-Bench for structured machine
learning. Our ablation study confirms that this is due to our novel
initialization method and support for data properties.}},
  author       = {{Heindorf, Stefan and Blübaum, Lukas and Düsterhus, Nick and Werner, Till and Golani, Varun Nandkumar and Demir, Caglar and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{WWW}},
  pages        = {{818--828}},
  publisher    = {{ACM}},
  title        = {{{EvoLearner: Learning Description Logics with Evolutionary Algorithms}}},
  doi          = {{10.1145/3485447.3511925}},
  year         = {{2022}},
}

