@article{23456,
  author   = {Mattiolo, Davide and Steffen, Eckhard},
  title    = {Highly edge-connected regular graphs without large factorizable subgraphs},
  journal  = {Journal of Graph Theory},
  volume   = {99},
  number   = {1},
  pages    = {107--116},
  year     = {2021},
  issn     = {0364-9024},
  doi      = {10.1002/jgt.22729},
  keywords = {factorization, perfect matchings, regular graphs, r-graphs},
}
@inproceedings{10586,
  author    = {Scheideler, Christian and Setzer, Alexander},
  title     = {On the Complexity of Local Graph Transformations},
  booktitle = {Proceedings of the 46th International Colloquium on Automata, Languages, and Programming},
  location  = {Patras, Greece},
  publisher = {Dagstuhl Publishing},
  volume    = {132},
  pages     = {150:1--150:14},
  year      = {2019},
  doi       = {10.4230/LIPICS.ICALP.2019.150},
  keywords  = {Graphs transformations, NP-hardness, approximation algorithms},
  abstract  = {We consider the problem of transforming a given graph {$G_s$} into a desired graph {$G_t$} by applying a minimum number of primitives from a particular set of local graph transformation primitives. These primitives are local in the sense that each node can apply them based on local knowledge and by affecting only its 1-neighborhood. Although the specific set of primitives we consider makes it possible to transform any (weakly) connected graph into any other (weakly) connected graph consisting of the same nodes, they cannot disconnect the graph or introduce new nodes into the graph, making them ideal in the context of supervised overlay network transformations. We prove that computing a minimum sequence of primitive applications (even centralized) for arbitrary {$G_s$} and {$G_t$} is NP-hard, which we conjecture to hold for any set of local graph transformation primitives satisfying the aforementioned properties. On the other hand, we show that this problem admits a polynomial time algorithm with a constant approximation ratio.},
}
@inproceedings{15921,
  author    = {Desouki, Abdelmoneim Amer and R{\"o}der, Michael and Ngonga Ngomo, Axel-Cyrille},
  title     = {Ranking on Very Large Knowledge Graphs},
  booktitle = {Proceedings of the 30th {ACM} Conference on Hypertext and Social Media - HT '19},
  publisher = {ACM},
  pages     = {163--171},
  year      = {2019},
  isbn      = {9781450368858},
  doi       = {10.1145/3342220.3343660},
  keywords  = {Knowledge Graphs, Ranking, RDF},
  abstract  = {Ranking plays a central role in a large number of applications driven by RDF knowledge graphs. Over the last years, many popular RDF knowledge graphs have grown so large that rankings for the facts they contain cannot be computed directly using the currently common 64-bit platforms. In this paper, we tackle two problems: Computing ranks on such large knowledge bases efficiently and incrementally. First, we present D-HARE, a distributed approach for computing ranks on very large knowledge graphs. D-HARE assumes the random surfer model and relies on data partitioning to compute matrix multiplications and transpositions on disk for matrices of arbitrary size. Moreover, the data partitioning underlying D-HARE allows the execution of most of its steps in parallel. As very large knowledge graphs are often updated periodically, we tackle the incremental computation of ranks on large knowledge bases as a second problem. We address this problem by presenting I-HARE, an approximation technique for calculating the overall ranking scores of a knowledge without the need to recalculate the ranking from scratch at each new revision. We evaluate our approaches by calculating ranks on the {$3 \times 10^9$} and {$2.4 \times 10^9$} triples from Wikidata resp. LinkedGeoData. Our evaluation demonstrates that D-HARE is the first holistic approach for computing ranks on very large RDF knowledge graphs. In addition, our incremental approach achieves a root mean squared error of less than 10E-7 in the best case. Both D-HARE and I-HARE are open-source and are available at: https://github.com/dice-group/incrementalHARE.},
}