@inproceedings{60958,
  abstract     = {Large Language Models (LLMs) excel in understanding, generating, and processing human language, with growing adoption in process mining. Process mining relies on event logs that capture explicit process knowledge; however, knowledge-intensive processes (KIPs) in domains such as healthcare and product development depend on tacit knowledge, which is often absent from event logs. To bridge this gap, this study proposes a LLM-based framework for mobilizing tacit process knowledge and enriching event logs. A proof-of-concept is demonstrated using a KIP-specific LLM-driven conversational agent built on GPT-4o. The results indicate that LLMs can capture tacit process knowledge through targeted queries and systematically integrate it into event logs. This study presents a novel approach combining LLMs, knowledge management, and process mining, advancing the understanding and management of KIPs by enhancing knowledge accessibility and documentation.},
  author       = {Brennig, Katharina},
  booktitle    = {{AMCIS} 2025 Proceedings. 11.},
  keywords     = {Process Mining, Large Language Model, Knowledge Management, Knowledge-Intensive Process, Tacit Knowledge},
  location     = {Montréal},
  title        = {Revealing the Unspoken: Using {LLMs} to Mobilize and Enrich Tacit Knowledge in Event Logs of Knowledge-Intensive Processes},
  year         = {2025},
}

@inbook{62701,
  abstract     = {Learning continuous vector representations for knowledge graphs has significantly improved state-of-the-art performances in many challenging tasks. Yet, deep-learning-based models are only post-hoc and locally explainable. In contrast, learning Web Ontology Language (OWL) class expressions in Description Logics (DLs) is ante-hoc and globally explainable. However, state-of-the-art learners have two well-known limitations: scaling to large knowledge graphs and handling missing information. Here, we present a decision-tree-based learner (tDL) to learn Web Ontology Languages (OWLs) class expressions over large knowledge graphs, while imputing missing triples. Given positive and negative example individuals, tDL firstly constructs unique OWL expressions in $\mathcal{SHOIN}$ from concise bounded descriptions of individuals. Each OWL class expression is used as a feature in a binary classification problem to represent input individuals. Thereafter, tDL fits a CART decision tree to learn Boolean decision rules distinguishing positive examples from negative examples. A final OWL expression in $\mathcal{SHOIN}$ is built by traversing the built CART decision tree from the root node to leaf nodes for each positive example. By this, tDL can learn OWL class expressions without exploration, i.e., the number of queries to a knowledge graph is bounded by the number of input individuals. Our empirical results show that tDL outperforms the current state-of-the-art models across datasets. Importantly, our experiments over a large knowledge graph (DBpedia with 1.1 billion triples) show that tDL can effectively learn accurate OWL class expressions, while the state-of-the-art models fail to return any results. Finally, expressions learned by tDL can be seamlessly translated into natural language explanations using a pre-trained large language model and a DL verbalizer.},
  author       = {Demir, Caglar and Yekini, Moshood and Röder, Michael and Mahmood, Yasir and Ngonga Ngomo, Axel-Cyrille},
  booktitle    = {Lecture Notes in Computer Science},
  isbn         = {9783032060655},
  issn         = {0302-9743},
  keywords     = {Decision Tree, OWL Class Expression Learning, Description Logic, Knowledge Graph, Large Language Model, Verbalizer},
  location     = {Porto, Portugal},
  publisher    = {Springer Nature Switzerland},
  title        = {Tree-Based {OWL} Class Expression Learner over Large Graphs},
  doi          = {10.1007/978-3-032-06066-2_29},
  year         = {2025},
}

