@inproceedings{61753,
  abstract     = {{This paper presents LOLA, a massively multilingual large language model trained on more than 160 languages using a sparse Mixture-of-Experts Transformer architecture. Our architectural and implementation choices address the challenge of harnessing linguistic diversity while maintaining efficiency and avoiding the common pitfalls of multilinguality. Our analysis of the evaluation results shows competitive performance in natural language generation and understanding tasks. Additionally, we demonstrate how the learned expert-routing mechanism exploits implicit phylogenetic linguistic patterns to potentially alleviate the curse of multilinguality. We provide an in-depth look at the training process, an analysis of the datasets, and a balanced exploration of the model's strengths and limitations. As an open-source model, LOLA promotes reproducibility and serves as a robust foundation for future research. Our findings enable the development of compute-efficient multilingual models with strong, scalable performance across languages.}},
  author       = {{Srivastava, Nikit and Kuchelev, Denis and Moteu Ngoli, Tatiana and Shetty, Kshitij and Röder, Michael and Zahera, Hamada Mohamed Abdelsamee and Moussallem, Diego and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{Proceedings of the 31st International Conference on Computational Linguistics}},
  editor       = {{Rambow, Owen and Wanner, Leo and Apidianaki, Marianna and Al-Khalifa, Hend and Di Eugenio, Barbara and Schockaert, Steven}},
  pages        = {{6420–6446}},
  publisher    = {{Association for Computational Linguistics}},
  title        = {{{LOLA – An Open-Source Massively Multilingual Large Language Model}}},
  year         = {{2025}},
}

@inproceedings{50797,
  author       = {{Röder, Michael and Kuchelev, Denis and Ngonga Ngomo, Axel-Cyrille}},
  booktitle    = {{Knowledge Graphs and Semantic Web}},
  editor       = {{Ortiz-Rodriguez, Fernando and Villazón-Terrazas, Boris and Tiwari, Sanju and Bobed, Carlos}},
  isbn         = {{978-3-031-47745-4}},
  keywords     = {{sail dice roeder kuchelev ngonga}},
  pages        = {{183–198}},
  publisher    = {{Springer Nature Switzerland}},
  title        = {{{A Topic Model for the Data Web}}},
  doi          = {{10.1007/978-3-031-47745-4_14}},
  year         = {{2023}},
}

@inproceedings{54614,
  author       = {{Srivastava, Nikit and Perevalov, Aleksandr and Kuchelev, Denis and Moussallem, Diego and Ngonga Ngomo, Axel-Cyrille and Both, Andreas}},
  booktitle    = {{Proceedings of the 12th Knowledge Capture Conference 2023, {K-CAP} 2023, Pensacola, FL, USA, December 5-7, 2023}},
  editor       = {{Venable, Kristen Brent and Garijo, Daniel and Jalaian, Brian}},
  keywords     = {{dice kuchelev moussallem ngonga srivastava}},
  pages        = {{122–130}},
  publisher    = {{ACM}},
  title        = {{{Lingua Franca - Entity-Aware Machine Translation Approach for Question Answering over Knowledge Graphs}}},
  doi          = {{10.1145/3587259.3627567}},
  year         = {{2023}},
}
