var bibbase_data = {"data":"\"Loading..\"\n\n
\n\n \n\n \n\n \n \n\n \n\n \n \n\n \n\n \n
\n generated by\n \n \"bibbase.org\"\n\n \n
\n \n\n
\n\n \n\n\n
\n\n Excellent! Next you can\n create a new website with this list, or\n embed it in an existing web page by copying & pasting\n any of the following snippets.\n\n
\n JavaScript\n (easiest)\n
\n \n <script src=\"https://bibbase.org/show?bib=http://rvb.su/rvb.bib&sort=title&jsonp=1&theme=mila&jsonp=1\"></script>\n \n
\n\n PHP\n
\n \n <?php\n $contents = file_get_contents(\"https://bibbase.org/show?bib=http://rvb.su/rvb.bib&sort=title&jsonp=1&theme=mila\");\n print_r($contents);\n ?>\n \n
\n\n iFrame\n (not recommended)\n
\n \n <iframe src=\"https://bibbase.org/show?bib=http://rvb.su/rvb.bib&sort=title&jsonp=1&theme=mila\"></iframe>\n \n
\n\n

\n For more details see the documention.\n

\n
\n
\n\n
\n\n This is a preview! To use this list on your own web site\n or create a new web site from it,\n create a free account. The file will be added\n and you will be able to edit it in the File Manager.\n We will show you instructions once you've created your account.\n
\n\n
\n\n

To the site owner:

\n\n

Action required! Mendeley is changing its\n API. In order to keep using Mendeley with BibBase past April\n 14th, you need to:\n

    \n
  1. renew the authorization for BibBase on Mendeley, and
  2. \n
  3. update the BibBase URL\n in your page the same way you did when you initially set up\n this page.\n
  4. \n
\n

\n\n

\n \n \n Fix it now\n

\n
\n\n
\n\n\n
\n \n \n
\n
\n  \n 2024\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n A quadratic-order problem kernel for the traveling salesman problem parameterized by the vertex cover number.\n \n \n \n \n\n\n \n van Bevern, R.; and Skachkov, D. A.\n\n\n \n\n\n\n Operations Research Letters, 52: 107065. 2024.\n \n\n\n\n
\n\n\n\n \n \n \"A preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n  \n \n 2 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BS24,\n  title =\t {A quadratic-order problem kernel for the traveling\n                  salesman problem parameterized by the vertex cover\n                  number},\n  author =\t {René van Bevern and Daniel A. Skachkov},\n  year =\t {2024},\n  date =\t {2024-01},\n  pages =\t {107065},\n  volume =\t {52},\n  journal =\t {Operations Research Letters},\n  doi =\t\t {10.1016/j.orl.2024.107065},\n  url_Preprint = {10.48550/arXiv.2207.08678}\n}\n\n\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Serial and parallel kernelization of Multiple Hitting Set parameterized by the Dilworth number, implemented on the GPU.\n \n \n \n \n\n\n \n van Bevern, R.; Kirilin, A. M.; Skachkov, D. A.; Smirnov, P. V.; and Tsidulko, O.\n\n\n \n\n\n\n Journal of Computer and System Sciences, 139: 103479. 2024.\n \n\n\n\n
\n\n\n\n \n \n \"Serial preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BKS+24,\n  url_Preprint = {10.48550/ARXIV.2109.06042},\n  doi =\t\t {10.1016/j.jcss.2023.103479},\n  author =\t {van Bevern, René and Kirilin, Artem M. and Skachkov,\n                  Daniel A. and Smirnov, Pavel V. and Tsidulko, Oxana},\n  title =\t {Serial and parallel kernelization of Multiple\n                  Hitting Set parameterized by the Dilworth number,\n                  implemented on the GPU},\n  journal =\t {Journal of Computer and System Sciences},\n  year =\t {2024},\n  date =\t {2024-02},\n  volume =\t {139},\n  pages =\t {103479}\n}\n\n\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2023\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n On data reduction for dynamic vector bin packing.\n \n \n \n \n\n\n \n van Bevern, R.; Melnikov, A.; Smirnov, P.; and Tsidulko, O.\n\n\n \n\n\n\n Operations Research Letters, 51: 446-452. 2023.\n \n\n\n\n
\n\n\n\n \n \n \"On preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BMST23,\n  url_Preprint = {http://doi.org/10.48550/ARXIV.2205.08769},\n  doi =\t\t {10.1016/j.orl.2023.06.005},\n  author =\t {van Bevern, René and Melnikov, Andrey and Smirnov,\n                  Pavel and Tsidulko, Oxana},\n  title =\t {On data reduction for dynamic vector bin packing},\n  journal =\t {Operations Research Letters},\n  volume =\t {51},\n  pages =\t {446-452},\n  year =\t {2023},\n  date =\t {2023-07-04}\n}\n\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Polynomial-time data reduction for weighted problems beyond additive goal functions.\n \n \n \n \n\n\n \n Bentert, M.; van Bevern, R.; Fluschnik, T.; Nichterlein, A.; and Niedermeier, R.\n\n\n \n\n\n\n Discrete Applied Mathematics, 328: 117–133. March 2023.\n \n\n\n\n
\n\n\n\n \n \n \"Polynomial-time preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BBF+23,\n  doi =\t\t {10.1016/j.dam.2022.11.018},\n  url_Preprint = {https://doi.org/10.1016/j.dam.2022.11.018},\n  year =\t {2023},\n  month =\t mar,\n  publisher =\t {Elsevier {BV}},\n  volume =\t {328},\n  pages =\t {117--133},\n  author =\t {Matthias Bentert and Ren{\\'{e}} van Bevern and Till\n                  Fluschnik and Andr{\\'{e}} Nichterlein and Rolf\n                  Niedermeier},\n  title =\t {Polynomial-time data reduction for weighted problems\n                  beyond additive goal functions},\n  journal =\t {Discrete Applied Mathematics}\n}\n\n
\n
\n\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2022\n \n \n (1)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Parameterized algorithms for power-efficiently connecting wireless sensor networks: Theory and experiments.\n \n \n \n \n\n\n \n Bentert, M.; van Bevern, R.; Nichterlein, A.; Niedermeier, R.; and Smirnov, P. V.\n\n\n \n\n\n\n INFORMS Journal on Computing, 34(1): 55–75. 2022.\n \n\n\n\n
\n\n\n\n \n \n \"Parameterized code\n  \n \n \n \"Parameterized slides\n  \n \n \n \"Parameterized preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{BBN+22,\n  author =\t {{Bentert}, Matthias and René van Bevern and\n                  {Nichterlein}, André and {Niedermeier}, Rolf and\n                  Smirnov, Pavel V.},\n  title =\t {Parameterized algorithms for power-efficiently\n                  connecting wireless sensor networks: Theory and\n                  experiments},\n  journal =\t {INFORMS Journal on Computing},\n  year =\t 2022,\n  abstract =\t {We study an NP-hard problem motivated by\n                  energy-efficiently maintaining the connectivity of a\n                  symmetric wireless sensor communication network:\n                  Given an edge-weighted n-vertex graph, find a\n                  connected spanning subgraph of minimum cost, where\n                  the cost is determined by letting each vertex pay\n                  the most expensive edge incident to it in the\n                  subgraph. On the negative side, we show that\n                  o(logn)-approximating the difference d between the\n                  optimal solution cost and a natural lower bound is\n                  NP-hard and that, under the Exponential Time\n                  Hypothesis, there are no exact algorithms running in\n                  2^o(n) time or in f(d)⋅n^O(1) time for any\n                  computable function f. On the positive side, we\n                  provide an algorithm that reconnects O(logn)\n                  connected components with minimum additional cost in\n                  polynomial time. These algorithms are motivated by\n                  application scenarios of monitoring areas or where\n                  an existing sensor network may fall apart into\n                  several connected components due to sensor\n                  faults. 
In experiments, the algorithm solves\n                  instances with four such connected components and\n                  about 8 000 vertices in five minutes, outperforming\n                  CPLEX with known ILP formulations for the problem.},\n  url_Code =\t {https://gitlab.com/rvb/mpsc},\n  url_Slides =\n                  {https://www.researchgate.net/publication/328262005_The_min-power_symmetric_connectivity_problem_with_few_obligatory_network_components},\n  doi =\t\t {10.1287/ijoc.2020.1045},\n  url_Preprint = {https://arxiv.org/abs/1706.03177},\n  volume =\t 34,\n  number =\t 1,\n  pages =\t {55--75},\n  date =\t {2022-03-22},\n  keywords =\t {connected spanning subgraphs, monitoring areas,\n                  reconnecting sensor networks, parameterized\n                  complexity analysis, approximation hardness,\n                  parameterization above lower bounds, color-coding,\n                  experimental comparison},\n}\n\n\n
\n
\n\n\n
\n We study an NP-hard problem motivated by energy-efficiently maintaining the connectivity of a symmetric wireless sensor communication network: Given an edge-weighted n-vertex graph, find a connected spanning subgraph of minimum cost, where the cost is determined by letting each vertex pay the most expensive edge incident to it in the subgraph. On the negative side, we show that o(logn)-approximating the difference d between the optimal solution cost and a natural lower bound is NP-hard and that, under the Exponential Time Hypothesis, there are no exact algorithms running in 2^o(n) time or in f(d)⋅n^O(1) time for any computable function f. On the positive side, we provide an algorithm that reconnects O(logn) connected components with minimum additional cost in polynomial time. These algorithms are motivated by application scenarios of monitoring areas or where an existing sensor network may fall apart into several connected components due to sensor faults. In experiments, the algorithm solves instances with four such connected components and about 8 000 vertices in five minutes, outperforming CPLEX with known ILP formulations for the problem.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2021\n \n \n (3)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Representative families for matroid intersections, with applications to location, packing, and covering problems.\n \n \n \n \n\n\n \n van Bevern, R.; Tsidulko, O.; and Zschoche, P.\n\n\n \n\n\n\n Discrete Applied Mathematics, 298: 110-128. 2021.\n \n\n\n\n
\n\n\n\n \n \n \"Representative preprint\n  \n \n \n \"Representative slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 6 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BTZ21,\n  author =\t {Ren{\\'{e}} van Bevern and Oxana Tsidulko and Philipp\n                  Zschoche},\n  title =\t {Representative families for matroid intersections,\n                  with applications to location, packing, and covering\n                  problems},\n  year =\t {2021},\n  journal =\t {Discrete Applied Mathematics},\n  volume =\t {298},\n  pages =\t {110-128},\n  date =\t {2021-03-22},\n  url_Preprint = {https://arxiv.org/abs/1806.11527},\n  doi =\t\t {10.1016/j.dam.2021.03.014},\n  url_Slides =\n                  {https://figshare.com/articles/presentation/Facility_location_under_matroid_constraints_fixed-parameter_algorithms_and_applications/13536788},\n  abstract =\t {We show algorithms for computing representative\n                  families for matroid intersections and use them in\n                  fixed-parameter algorithms for set packing, set\n                  covering, and facility location problems with\n                  multiple matroid constraints. We complement our\n                  tractability results by hardness results. },\n}\n\n
\n
\n\n\n
\n We show algorithms for computing representative families for matroid intersections and use them in fixed-parameter algorithms for set packing, set covering, and facility location problems with multiple matroid constraints. We complement our tractability results by hardness results. \n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Special Issue on Computer Science Symposium in Russia (2019).\n \n \n \n\n\n \n van Bevern, R.; and Kucherov, G.\n\n\n \n\n\n\n Theory of Computing Systems, 65(3): 441–443. April 2021.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n  \n \n 4 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BK21,\n  doi =\t\t {10.1007/s00224-021-10033-0},\n  year =\t {2021},\n  month =\t apr,\n  publisher =\t {Springer},\n  author =\t {Ren{\\'{e}} van Bevern and Gregory Kucherov},\n  title =\t {Special Issue on Computer Science Symposium in\n                  Russia (2019)},\n  journal =\t {Theory of Computing Systems},\n  volume =\t 65,\n  number =\t 3,\n  pages =\t {441--443},\n}\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n The Hierarchical Chinese Postman Problem: the slightest disorder makes it hard, yet disconnectedness is manageable.\n \n \n \n \n\n\n \n Afanasev, V. A.; van Bevern, R.; and Tsidulko, O.\n\n\n \n\n\n\n Operations Research Letters, 49(2): 270–277. 2021.\n \n\n\n\n
\n\n\n\n \n \n \"The preprint\n  \n \n \n \"The poster\n  \n \n \n \"The slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{ABT21,\n  title =\t {The Hierarchical Chinese Postman Problem: the\n                  slightest disorder makes it hard, yet\n                  disconnectedness is manageable},\n  author =\t {Vsevolod A. Afanasev and René van Bevern and Oxana\n                  Tsidulko},\n  journal =\t {Operations Research Letters},\n  year =\t 2021,\n  volume =\t 49,\n  number =\t 2,\n  pages =\t {270--277},\n  doi =\t\t {10.1016/j.orl.2021.01.017},\n  date =\t {2021-02-05},\n  url_Preprint = {https://arxiv.org/abs/2011.04022},\n  url_Poster =\t {https://doi.org/10.6084/m9.figshare.13536797},\n  url_Slides =\n                  {https://www.researchgate.net/publication/346675762_Hierarchical_Chinese_Postman_Problem_slightest_disorder_makes_it_hard_disconnectedness_is_manageable},\n  keywords =\t {approximation algorithm, fixed-parameter algorithm,\n                  NP-hardness, arc routing, rural postman problem,\n                  temporal graphs},\n  abstract =\t {The Hierarchical Chinese Postman Problem is finding\n                  a shortest traversal of all edges of a graph\n                  respecting precedence constraints given by a partial\n                  order on classes of edges. We show that the special\n                  case with connected classes is NP-hard even on\n                  orders decomposable into a chain and an incomparable\n                  class. For the case with linearly ordered (possibly\n                  disconnected) classes, we get 5/3-approximations and\n                  fixed-parameter algorithms by transferring results\n                  from the Rural Postman Problem.}\n}\n\n
\n
\n\n\n
\n The Hierarchical Chinese Postman Problem is finding a shortest traversal of all edges of a graph respecting precedence constraints given by a partial order on classes of edges. We show that the special case with connected classes is NP-hard even on orders decomposable into a chain and an incomparable class. For the case with linearly ordered (possibly disconnected) classes, we get 5/3-approximations and fixed-parameter algorithms by transferring results from the Rural Postman Problem.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2020\n \n \n (5)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n A historical note on the 3/2-approximation algorithm for the metric traveling salesman problem.\n \n \n \n \n\n\n \n van Bevern, R.; and Slugina, V. A.\n\n\n \n\n\n\n Historia Mathematica, 53: 118-127. 2020.\n \n\n\n\n
\n\n\n\n \n \n \"A preprint\n  \n \n \n \"A poster\n  \n \n \n \"A slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 9 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BS20b,\n  title =\t {A historical note on the 3/2-approximation algorithm\n                  for the metric traveling salesman problem},\n  author =\t {René van Bevern and Viktoriia A. Slugina},\n  year =\t {2020},\n  date =\t {2020-11-17},\n  journal =\t {Historia Mathematica},\n  url_Preprint = {https://arxiv.org/abs/2004.02437},\n  url_Poster =\n                  {https://figshare.com/articles/poster/Parallel_Roads_to_the_Traveling_Salesman_3_2-Approximation/12728723},\n  url_Slides =\n                  {https://www.researchgate.net/publication/348297719_Algoritm_Kristofidesa_ili_Kristofidesa-Serdukova_Istoria_otkrytia_i_publikacii},\n  volume =\t 53,\n  pages =\t {118-127},\n  doi =\t\t {10.1016/j.hm.2020.04.003},\n  abstract =\t {One of the most fundamental results in combinatorial\n                  optimization is the polynomial-time\n                  3/2-approximation algorithm for the metric traveling\n                  salesman problem. It was presented by Christofides\n                  in 1976 and is well known as "the Christofides\n                  algorithm". Recently, some authors started calling\n                  it "Christofides-Serdyukov algorithm", pointing out\n                  that the same result was published independently in\n                  the USSR in 1978. We provide some historic\n                  background on Serdyukov's findings and a translation\n                  of his article.}\n}\n\n\n\n
\n
\n\n\n
\n One of the most fundamental results in combinatorial optimization is the polynomial-time 3/2-approximation algorithm for the metric traveling salesman problem. It was presented by Christofides in 1976 and is well known as \"the Christofides algorithm\". Recently, some authors started calling it \"Christofides-Serdyukov algorithm\", pointing out that the same result was published independently in the USSR in 1978. We provide some historic background on Serdyukov's findings and a translation of his article.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n H-Index Manipulation by Undoing Merges.\n \n \n \n \n\n\n \n van Bevern, R.; Komusiewicz, C.; Molter, H.; Niedermeier, R.; Sorge, M.; and Walsh, T.\n\n\n \n\n\n\n Quantitative Science Studies, 1(4): 1529–1552. 2020.\n \n\n\n\n
\n\n\n\n \n \n \"H-Index preprint\n  \n \n \n \"H-Index code\n  \n \n \n \"H-Index poster\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 4 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BKM+20,\n  title =\t {H-Index Manipulation by Undoing Merges},\n  author =\t {René van Bevern and Christian Komusiewicz and\n                  Hendrik Molter and Rolf Niedermeier and Manuel Sorge\n                  and Toby Walsh},\n  doi =\t\t {10.1162/qss_a_00093},\n  url_Preprint = {http://arxiv.org/abs/1604.04827},\n  url_Code =\t {https://gitlab.com/rvb/split-index},\n  url_Poster =\n                  {https://figshare.com/articles/poster/H-index_manipulation_by_undoing_merges/13521758},\n  journal =\t {Quantitative Science Studies},\n  volume =\t 1,\n  number =\t 4,\n  pages =\t {1529--1552},\n  abstract =\t {The h-index is an important bibliographic measure\n                  used to assess the performance of\n                  researchers. Dutiful researchers merge different\n                  versions of their articles in their Google Scholar\n                  profile even though this can decrease their\n                  h-index. In this article, we study the manipulation\n                  of the h-index by undoing such merges. In contrast\n                  to manipulation by merging articles (van Bevern et\n                  al. [Artif. Intel. 240:19-35, 2016]) such\n                  manipulation is harder to detect. We present\n                  numerous results on computational complexity (from\n                  linear-time algorithms to parameterized\n                  computational hardness results) and empirically\n                  indicate that at least small improvements of the\n                  h-index by splitting merged articles are\n                  unfortunately easily achievable.},\n  year =\t 2020,\n  date =\t {2020-10-13},\n}\n\n\n\n\n\n
\n
\n\n\n
\n The h-index is an important bibliographic measure used to assess the performance of researchers. Dutiful researchers merge different versions of their articles in their Google Scholar profile even though this can decrease their h-index. In this article, we study the manipulation of the h-index by undoing such merges. In contrast to manipulation by merging articles (van Bevern et al. [Artif. Intel. 240:19-35, 2016]) such manipulation is harder to detect. We present numerous results on computational complexity (from linear-time algorithms to parameterized computational hardness results) and empirically indicate that at least small improvements of the h-index by splitting merged articles are unfortunately easily achievable.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n On approximate data reduction for the Rural Postman Problem: Theory and experiments.\n \n \n \n \n\n\n \n van Bevern, R.; Fluschnik, T.; and Tsidulko, O.\n\n\n \n\n\n\n Networks, 76(4): 485-508. 2020.\n \n\n\n\n
\n\n\n\n \n \n \"On preprint\n  \n \n \n \"On poster\n  \n \n \n \"On slides\n  \n \n \n \"On code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 6 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BFT20b,\n  author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n                  Tsidulko},\n  title =\t {On approximate data reduction for the {Rural Postman\n                  Problem}: Theory and experiments},\n  year =\t {2020},\n  date =\t {2020-11-02},\n  journal =\t {Networks},\n  url_Preprint = {https://arxiv.org/abs/1812.10131},\n  url_Poster =\n                  {https://figshare.com/articles/poster/On_approximate_data_reduction_for_the_Rural_Postman_Problem/13058969},\n  url_Slides =\n                  {https://www.researchgate.net/publication/348297308_On_approximate_approximate_data_reduction_for_the_Rural_Postman_Problem_Theory_and_Experiments},\n  doi =\t\t {10.1002/net.21985},\n  volume =\t 76,\n  number =\t 4,\n  pages =\t {485-508},\n  url_Code =\t {https://gitlab.com/rvb/rpp-psaks},\n  abstract =\t {Given a graph G=(V,E) with edge weights ω:E→N∪{0}\n                  and a subset R⊆E of required edges, the Rural\n                  Postman Problem (RPP) is to find a closed walk W* of\n                  minimum weight ω(W*) containing all edges of R. We\n                  prove that RPP is WK[1]-complete parameterized by\n                  the number and cost d=ω(W^*)-ω(R)+|W*|-|R| of edges\n                  traversed additionally to the required ones, that\n                  is, presumably cannot be polynomial-time reduced to\n                  solving instances of size polynomial in d. In\n                  contrast, denoting by b≤2d the number of vertices\n                  incident to an odd number of edges of R and by c≤d\n                  the number of connected components formed by the\n                  edges in R, we show how to reduce any RPP instance I\n                  to an RPP instance I' with 2b+O(c/ε) vertices in\n                  O(n³) time so that any α-approximate solution for I'\n                  gives an α(1+ε)-approximate solution for I, for any\n                  α≥1 and ε>0. 
That is, we provide a polynomial-size\n                  approximate kernelization scheme (PSAKS). We make\n                  first steps towards a PSAKS for the parameter c.}\n}\n\n
\n
\n\n\n
\n Given a graph G=(V,E) with edge weights ω:E→N∪0 and a subset R⊆E of required edges, the Rural Postman Problem (RPP) is to find a closed walk W* of minimum weight ω(W*) containing all edges of R. We prove that RPP is WK[1]-complete parameterized by the number and cost d=ω(W^*)-ω(R)+|W*|-|R| of edges traversed additionally to the required ones, that is, presumably cannot be polynomial-time reduced to solving instances of size polynomial in d. In contrast, denoting by b≤2d the number of vertices incident to an odd number of edges of R and by c≤d the number of connected components formed by the edges in R, we show how to reduce any RPP instance I to an RPP instance I' with 2b+O(c/ε) vertices in O(n³) time so that any α-approximate solution for I' gives an α(1+ε)-approximate solution for I, for any α≥1 and ε>0. That is, we provide a polynomial-size approximate kernelization scheme (PSAKS). We make first steps towards a PSAKS for the parameter c.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Optimal-size problem kernels for $d$-Hitting Set in linear time and space.\n \n \n \n \n\n\n \n van Bevern, R.; and Smirnov, P. V.\n\n\n \n\n\n\n Information Processing Letters,105998. 2020.\n \n\n\n\n
\n\n\n\n \n \n \"Optimal-size preprint\n  \n \n \n \"Optimal-size code\n  \n \n \n \"Optimal-size poster\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 6 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BS20,\n  title =\t {Optimal-size problem kernels for $d$-Hitting Set in\n                  linear time and space},\n  author =\t {René van Bevern and Pavel V. Smirnov},\n  year =\t {2020},\n  date =\t {2020-10-01},\n  journal =\t {Information Processing Letters},\n  volyme =\t {163},\n  pages =\t {105998},\n  doi =\t\t {10.1016/j.ipl.2020.105998},\n  url_Preprint = {https://arxiv.org/abs/2003.04578},\n  url_Code =\t {https://gitlab.com/PavelSmirnov/hs-lintimespace},\n  url_Poster =\t {https://figshare.com/articles/poster/Optimal-size_problem_kernels_for_d-Hitting_Set_in_linear_time_and_space/12782975},\n  abstract =\t {We improve two linear-time data reduction algorithms\n                  for the d-Hitting Set problem to work in linear\n                  space, thus obtaining the first algorithms for\n                  computing problem kernels of asymptotically optimal\n                  size O(k^d) for d-Hitting Set in linear time and\n                  space. We experimentally compare the two algorithms\n                  to a classical data reduction algorithm of Weihe and\n                  evaluate their combinations.}\n}\n\n\n\n
\n
\n\n\n
\n We improve two linear-time data reduction algorithms for the d-Hitting Set problem to work in linear space, thus obtaining the first algorithms for computing problem kernels of asymptotically optimal size O(k^d) for d-Hitting Set in linear time and space. We experimentally compare the two algorithms to a classical data reduction algorithm of Weihe and evaluate their combinations.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Parameterized algorithms and data reduction for the short secluded $s$-$t$-path problem.\n \n \n \n \n\n\n \n van Bevern, R.; Fluschnik, T.; and Tsidulko, O.\n\n\n \n\n\n\n Networks, 75(1): 34-63. 2020.\n \n\n\n\n
\n\n\n\n \n \n \"Parameterized preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BFT20,\n  author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n                  Tsidulko},\n  title =\t {Parameterized algorithms and data reduction for the\n                  short secluded $s$-$t$-path problem},\n  journal =\t {Networks},\n  year =\t 2020,\n  date =\t {2020-01-01},\n  volume =\t {75},\n  number =\t {1},\n  pages =\t {34-63},\n  url_Preprint = {https://arxiv.org/abs/1806.09540},\n  doi =\t\t {10.1002/net.21904},\n  abstract =\t {Given a graph G=(V,E), two vertices s,t∈V, and two\n                  integers k,ℓ, we search for a simple s-t-path with\n                  at most k vertices and at most ℓ neighbors. For\n                  graphs with constant crossing number, we provide a\n                  subexponential 2^O(√n)-time algorithm, prove a\n                  matching lower bound, and show a polynomial-time\n                  data reduction algorithm that reduces any problem\n                  instance to an equivalent instance (a so-called\n                  problem kernel) of size polynomial in the vertex\n                  cover number of the input graph. In contrast, we\n                  show that the problem in general graphs is hard to\n                  preprocess. We obtain a 2^O(ω)⋅ℓ^2⋅n-time algorithm\n                  for graphs of treewidth ω, show that there is no\n                  problem kernel with size polynomial in ω, yet show a\n                  problem kernels with size polynomial in the feedback\n                  edge number of the input graph and with size\n                  polynomial in the feedback vertex number, k, and ℓ.}\n}\n\n
\n
\n\n\n
\n Given a graph G=(V,E), two vertices s,t∈V, and two integers k,ℓ, we search for a simple s-t-path with at most k vertices and at most ℓ neighbors. For graphs with constant crossing number, we provide a subexponential 2^O(√n)-time algorithm, prove a matching lower bound, and show a polynomial-time data reduction algorithm that reduces any problem instance to an equivalent instance (a so-called problem kernel) of size polynomial in the vertex cover number of the input graph. In contrast, we show that the problem in general graphs is hard to preprocess. We obtain a 2^O(ω)⋅ℓ^2⋅n-time algorithm for graphs of treewidth ω, show that there is no problem kernel with size polynomial in ω, yet show a problem kernels with size polynomial in the feedback edge number of the input graph and with size polynomial in the feedback vertex number, k, and ℓ.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2019\n \n \n (5)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n An algorithm with parameterized complexity of constructing the optimal schedule for the routing open shop problem with unit execution times.\n \n \n \n\n\n \n van Bevern, R. A.; Pyatkin, A. V.; and Sevastyanov, S. V.\n\n\n \n\n\n\n Siberian Electronic Mathematical Reports, 16: 42–84. 2019.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 2 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BPS19,\n  author =\t {René A. van Bevern and Artem V. Pyatkin and Sergey\n                  V. Sevastyanov},\n  journal =\t {Siberian Electronic Mathematical Reports},\n  title =\t {An algorithm with parameterized complexity of\n                  constructing the optimal schedule for the routing\n                  open shop problem with unit execution times},\n  date =\t {2019-01-27},\n  year =\t 2019,\n  volume =\t 16,\n  pages =\t {42--84},\n  doi =\t\t {10.33048/semi.2019.16.003},\n  issn =\t {1813-3304},\n  abstract =\t {For the Routing Open Shop problem with unit\n                  execution times, the first algorithm with\n                  parameterized complexity is designed for\n                  constructing an optimal schedule. Its running time\n                  is bounded by a function (Pol(|V |) + f(m, g))·|I|,\n                  where Pol(|V|) is a polynomial of the number of\n                  network nodes, f(m, g) is a function of the number\n                  of machines and the number of job locations, and |I|\n                  is the input length in its compact encoding.}\n}\n\n
\n
\n\n\n
\n For the Routing Open Shop problem with unit execution times, the first algorithm with parameterized complexity is designed for constructing an optimal schedule. Its running time is bounded by a function (Pol(|V |) + f(m, g))·|I|, where Pol(|V|) is a polynomial of the number of network nodes, f(m, g) is a function of the number of machines and the number of job locations, and |I| is the input length in its compact encoding.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n CSR 2019.\n \n \n \n\n\n \n van Bevern, R.; and Kucherov, G.,\n editors.\n \n\n\n \n\n\n\n Volume 11532, of Lecture Notes in Computer Science.Springer. 2019.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@proceedings{BK19,\n  editor =\t {Ren{\\'{e}} van Bevern and Gregory Kucherov},\n  title =\t {CSR 2019},\n  series =\t {Lecture Notes in Computer Science},\n  volume =\t {11532},\n  publisher =\t {Springer},\n  year =\t {2019},\n  doi =\t\t {10.1007/978-3-030-19955-5},\n  timestamp =\t {Tue, 25 Jun 2019 14:30:35 +0200},\n  biburl =\t {https://dblp.org/rec/bib/conf/csr/2019},\n  bibsource =\t {dblp computer science bibliography,\n                  https://dblp.org}\n}\n\n\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Fixed-parameter algorithms for maximum-profit facility location under matroid constraints.\n \n \n \n\n\n \n van Bevern, R.; Tsidulko, O.; and Zschoche, P.\n\n\n \n\n\n\n In Heggernes, P., editor(s), CIAC 2019, volume 11485, of Lecture Notes in Computer Science, pages 62–74. Springer, 2019.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 2 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@incollection{BTZ19,\n  author =\t {Ren{\\'{e}} van Bevern and Oxana Tsidulko and Philipp\n                  Zschoche},\n  year =\t "2019",\n  pages =\t "62--74",\n  title =\t {Fixed-parameter algorithms for maximum-profit\n                  facility location under matroid constraints},\n  editor =\t {Pinar Heggernes},\n  booktitle =\t {CIAC 2019},\n  series =\t {Lecture Notes in Computer Science},\n  publisher =\t {Springer},\n  volume =\t {11485},\n  date =\t {2019-05-27},\n  doi =\t\t {10.1007/978-3-030-17402-6_6},\n  abstract =\t {We consider a variant of the matroid median problem\n                  introduced by Krishnaswamy et al. [SODA 2011]: an\n                  uncapacitated discrete facility location problem\n                  where the task is to decide which facilities to open\n                  and which clients to serve for maximum profit so\n                  that the facilities form an independent set in given\n                  facility-side matroids and the clients form an\n                  independent set in given client-side matroids. We\n                  show that the problem is fixed-parameter tractable\n                  parameterized by the number of matroids and the\n                  minimum rank among the client-side matroids. To this\n                  end, we derive fixed-parameter algorithms for\n                  computing representative families for matroid\n                  intersections and maximum-weight set packings with\n                  multiple matroid constraints. To illustrate the\n                  modeling capabilities of the new problem, we use it\n                  to obtain algorithms for a problem in social network\n                  analysis. We complement our tractability results by\n                  lower bounds.}\n}\n\n\n
\n
\n\n\n
\n We consider a variant of the matroid median problem introduced by Krishnaswamy et al. [SODA 2011]: an uncapacitated discrete facility location problem where the task is to decide which facilities to open and which clients to serve for maximum profit so that the facilities form an independent set in given facility-side matroids and the clients form an independent set in given client-side matroids. We show that the problem is fixed-parameter tractable parameterized by the number of matroids and the minimum rank among the client-side matroids. To this end, we derive fixed-parameter algorithms for computing representative families for matroid intersections and maximum-weight set packings with multiple matroid constraints. To illustrate the modeling capabilities of the new problem, we use it to obtain algorithms for a problem in social network analysis. We complement our tractability results by lower bounds.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Inductive $k$-independent graphs and $c$-colorable subgraphs in scheduling: A review.\n \n \n \n \n\n\n \n Bentert, M.; van Bevern, R.; and Niedermeier, R.\n\n\n \n\n\n\n Journal of Scheduling, 22(1): 3–20. 2019.\n \n\n\n\n
\n\n\n\n \n \n \"Inductive preprint\n  \n \n \n \"Inductive poster\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BBN19,\n  author =\t {Matthias Bentert and Ren{\\'{e}} van Bevern and Rolf\n                  Niedermeier},\n  journal =\t {Journal of Scheduling},\n  date =\t {2019-02-01},\n  title =\t {Inductive $k$-independent graphs and $c$-colorable\n                  subgraphs in scheduling: A review},\n  year =\t 2019,\n  doi =\t\t {10.1007/s10951-018-0595-8},\n  volume =\t {22},\n  number =\t 1,\n  pages =\t {3--20},\n  url_Preprint = {https://arxiv.org/abs/1712.06481},\n  url_Poster =\n                  {https://www.researchgate.net/publication/342122158_Wireless_Scheduling_Graph_Classes_and_c-Colorable_Subgraphs},\n  abstract =\t {Inductive k-independent graphs generalize chordal\n                  graphs and have recently been advocated in the\n                  context of interference-avoiding wireless\n                  communication scheduling. The NP-hard problem of\n                  finding maximum-weight induced c-colorable\n                  subgraphs, which is a generalization of finding\n                  maximum independent sets, naturally occurs when\n                  selecting c sets of pairwise non-conflicting jobs\n                  (modeled as graph vertices). We investigate the\n                  parameterized complexity of this problem on\n                  inductive k-independent graphs. We show that the\n                  Independent Set problem is W[1]-hard even on\n                  2-simplicial 3-minoes---a subclass of inductive\n                  2-independent graphs. In contrast, we prove that the\n                  more general Maximum c-Colorable Subgraph problem is\n                  fixed-parameter tractable on edge-wise unions of\n                  cluster and chordal graphs, which are\n                  2-simplicial. In both cases, the parameter is the\n                  solution size. 
Aside from this, we survey other\n                  graph classes between inductive 1-inductive and\n                  inductive 2-inductive graphs with applications in\n                  scheduling.}\n}\n\n\n
\n
\n\n\n
\n Inductive k-independent graphs generalize chordal graphs and have recently been advocated in the context of interference-avoiding wireless communication scheduling. The NP-hard problem of finding maximum-weight induced c-colorable subgraphs, which is a generalization of finding maximum independent sets, naturally occurs when selecting c sets of pairwise non-conflicting jobs (modeled as graph vertices). We investigate the parameterized complexity of this problem on inductive k-independent graphs. We show that the Independent Set problem is W[1]-hard even on 2-simplicial 3-minoes—a subclass of inductive 2-independent graphs. In contrast, we prove that the more general Maximum c-Colorable Subgraph problem is fixed-parameter tractable on edge-wise unions of cluster and chordal graphs, which are 2-simplicial. In both cases, the parameter is the solution size. Aside from this, we survey other graph classes between inductive 1-independent and inductive 2-independent graphs with applications in scheduling.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n On $(1+ɛ)$-approximate data reduction for the Rural Postman Problem.\n \n \n \n \n\n\n \n van Bevern, R.; Fluschnik, T.; and Tsidulko, O.\n\n\n \n\n\n\n In Khachay, M.; Kochetov, Y.; and Pardalos, P., editor(s), MOTOR 2019, volume 11548, of Lecture Notes in Computer Science, pages 279–294. Springer, 2019.\n \n\n\n\n
\n\n\n\n \n \n \"On preprint\n  \n \n \n \"On slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@incollection{BFT19,\n  volume =\t {11548},\n  pages =\t {279--294},\n  author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n                  Tsidulko},\n  title =\t {On $(1+\\varepsilon)$-approximate data reduction for\n                  the {Rural Postman Problem}},\n  year =\t 2019,\n  date =\t {2019-07-06},\n  booktitle =\t {MOTOR 2019},\n  editor =\t {Michael Khachay and Yury Kochetov and Panos\n                  Pardalos},\n  series =\t {Lecture Notes in Computer Science},\n  publisher =\t {Springer},\n  url_Preprint = {https://arxiv.org/abs/1812.10131v1},\n  url_Slides =\n                  {https://www.researchgate.net/publication/342122216_On_1e-approximate_data_reduction_for_the_Rural_Postman_Problem},\n  doi =\t\t {10.1007/978-3-030-22629-9_20},\n  abstract =\t {Given a graph G=(V,E) with edge weights ω:E→N∪{0}\n                  and a subset R⊆E of required edges, the Rural\n                  Postman Problem (RPP) is to find a closed walk W* of\n                  minimum weight ω(W*) containing all edges of R. We\n                  prove that RPP is WK[1]-complete parameterized by\n                  the number and cost d=ω(W^*)-ω(R)+|W*|-|R| of edges\n                  traversed additionally to the required ones, that\n                  is, presumably cannot be polynomial-time reduced to\n                  solving instances of size polynomial in d. In\n                  contrast, denoting by b≤2d the number of vertices\n                  incident to an odd number of edges of R and by c≤d\n                  the number of connected components formed by the\n                  edges in R, we show how to reduce any RPP instance I\n                  to an RPP instance I' with 2b+O(c/ε) vertices in\n                  O(n³) time so that any α-approximate solution for I'\n                  gives an α(1+ε)-approximate solution for I, for any\n                  α≥1 and ε>0. 
That is, we provide a polynomial-size\n                  approximate kernelization scheme (PSAKS). We make\n                  first steps towards a PSAKS for the parameter c.}\n}\n\n\n\n\n
\n
\n\n\n
\n Given a graph G=(V,E) with edge weights ω:E→N∪{0} and a subset R⊆E of required edges, the Rural Postman Problem (RPP) is to find a closed walk W* of minimum weight ω(W*) containing all edges of R. We prove that RPP is WK[1]-complete parameterized by the number and cost d=ω(W^*)-ω(R)+|W*|-|R| of edges traversed additionally to the required ones, that is, presumably cannot be polynomial-time reduced to solving instances of size polynomial in d. In contrast, denoting by b≤2d the number of vertices incident to an odd number of edges of R and by c≤d the number of connected components formed by the edges in R, we show how to reduce any RPP instance I to an RPP instance I' with 2b+O(c/ε) vertices in O(n³) time so that any α-approximate solution for I' gives an α(1+ε)-approximate solution for I, for any α≥1 and ε>0. That is, we provide a polynomial-size approximate kernelization scheme (PSAKS). We make first steps towards a PSAKS for the parameter c.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2018\n \n \n (4)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n Parameterized algorithms and data reduction for safe convoy routing.\n \n \n \n\n\n \n van Bevern, R.; Fluschnik, T.; and Tsidulko, O.\n\n\n \n\n\n\n In Borndörfer, R.; and Storandt, S., editor(s), ATMOS 2018, volume 65, of OpenAccess Series in Informatics (OASIcs), pages 10:1–10:19. Schloss Dagstuhl–Leibniz-Zentrum fuer Informatik, 2018.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BFT18,\n  author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n                  Tsidulko},\n  title =\t {Parameterized algorithms and data reduction for safe\n                  convoy routing},\n  year =\t 2018,\n  date =\t {2018-08-29},\n  doi =\t\t {10.4230/OASIcs.ATMOS.2018.10},\n  editor =\t {Ralf Borndörfer and Sabine Storandt},\n  booktitle =\t {ATMOS 2018},\n  publisher =\t {Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik},\n  series =\t {OpenAccess Series in Informatics (OASIcs)},\n  pages =\t {10:1--10:19},\n  volume =\t 65,\n  keywords =\t {NP-hard problem, fixed-parameter tractability,\n                  problem kernelization, shortest path, secluded\n                  solution},\n  abstract =\t {We study a problem that models safely routing a\n                  convoy through a transportation network, where any\n                  vertex adjacent to the travel path of the convoy\n                  requires additional precaution: Given a graph\n                  G=(V,E), two vertices s,t∈V, and two integers k,ℓ,\n                  we search for a simple s-t-path with at most k\n                  vertices and at most ℓ neighbors. We study the\n                  problem in two types of transportation networks:\n                  graphs with small crossing number, as formed by road\n                  networks, and tree-like graphs, as formed by\n                  waterways. For graphs with constant crossing number,\n                  we provide a subexponential 2^O(√n)-time algorithm\n                  and prove a matching lower bound. We also show a\n                  polynomial-time data reduction algorithm that\n                  reduces any problem instance to an equivalent\n                  instance (a so-called problem kernel) of size\n                  polynomial in the vertex cover number of the input\n                  graph. 
In contrast, we show that the problem in\n                  general graphs is hard to preprocess. Regarding\n                  tree-like graphs, we obtain a 2^O(tw)⋅ℓ^2⋅n-time\n                  algorithm for graphs of treewidth tw, show that\n                  there is no problem kernel with size polynomial in\n                  tw, yet show a problem kernel with size polynomial\n                  in the feedback edge number of the input graph.}\n}\n\n\n
\n
\n\n\n
\n We study a problem that models safely routing a convoy through a transportation network, where any vertex adjacent to the travel path of the convoy requires additional precaution: Given a graph G=(V,E), two vertices s,t∈V, and two integers k,ℓ, we search for a simple s-t-path with at most k vertices and at most ℓ neighbors. We study the problem in two types of transportation networks: graphs with small crossing number, as formed by road networks, and tree-like graphs, as formed by waterways. For graphs with constant crossing number, we provide a subexponential 2^O(√n)-time algorithm and prove a matching lower bound. We also show a polynomial-time data reduction algorithm that reduces any problem instance to an equivalent instance (a so-called problem kernel) of size polynomial in the vertex cover number of the input graph. In contrast, we show that the problem in general graphs is hard to preprocess. Regarding tree-like graphs, we obtain a 2^O(tw)⋅ℓ^2⋅n-time algorithm for graphs of treewidth tw, show that there is no problem kernel with size polynomial in tw, yet show a problem kernel with size polynomial in the feedback edge number of the input graph.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Parameterized complexity of machine scheduling: 15 open problems.\n \n \n \n \n\n\n \n Mnich, M.; and van Bevern, R.\n\n\n \n\n\n\n Computers & Operations Research, 100: 254–261. 2018.\n \n\n\n\n
\n\n\n\n \n \n \"Parameterized preprint\n  \n \n \n \"Parameterized slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 2 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{MB18,\n  title =\t "Parameterized complexity of machine scheduling: 15\n                  open problems",\n  journal =\t "Computers \\& Operations Research",\n  volume =\t "100",\n  pages =\t "254--261",\n  year =\t "2018",\n  issn =\t "0305-0548",\n  doi =\t\t "10.1016/j.cor.2018.07.020",\n  author =\t "Matthias Mnich and René van Bevern",\n  keywords =\t "Parallel machines, Shop scheduling, Makespan, Total\n                  completion time, Total tardiness, Throughput, Number\n                  of tardy jobs",\n  abstract =\t "Machine scheduling problems are a long-time key\n                  domain of algorithms and complexity research. A\n                  novel approach to machine scheduling problems are\n                  fixed-parameter algorithms. To stimulate this\n                  thriving research direction, we propose 15 open\n                  questions in this area whose resolution we expect to\n                  lead to the discovery of new approaches and\n                  techniques both in scheduling and parameterized\n                  complexity theory.",\n  date =\t {2018-08-16},\n  url_Preprint = {http://arxiv.org/abs/1709.01670},\n  url_Slides =\n                  {https://www.researchgate.net/publication/342122033_Open_problems_on_the_parameterized_complexity_of_scheduling_problems}\n}\n\n\n\n
\n
\n\n\n
\n Machine scheduling problems are a long-time key domain of algorithms and complexity research. A novel approach to machine scheduling problems are fixed-parameter algorithms. To stimulate this thriving research direction, we propose 15 open questions in this area whose resolution we expect to lead to the discovery of new approaches and techniques both in scheduling and parameterized complexity theory.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Parameterizing edge modification problems above lower bounds.\n \n \n \n \n\n\n \n van Bevern, R.; Froese, V.; and Komusiewicz, C.\n\n\n \n\n\n\n Theory of Computing Systems, 62(3): 739–770. 2018.\n \n\n\n\n
\n\n\n\n \n \n \"Parameterizing preprint\n  \n \n \n \"Parameterizing slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 3 downloads\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{BFK18,\n  title =\t {Parameterizing edge modification problems above\n                  lower bounds},\n  author =\t {René van Bevern and Vincent Froese and Christian\n                  Komusiewicz},\n  url_Preprint = {http://arxiv.org/abs/1512.04047},\n  doi =\t\t {10.1007/s00224-016-9746-5},\n  url_Slides =\t {http://rvb.su/pdf/above-guarantee-editing-csr16.pdf},\n  year =\t 2018,\n  date =\t {2018-04-01},\n  journal =\t {Theory of Computing Systems},\n  abstract =\t {We study the parameterized complexity of a variant\n                  of the F-free Editing problem: Given a graph G and a\n                  natural number k, is it possible to modify at most k\n                  edges in G so that the resulting graph contains no\n                  induced subgraph isomorphic to F?  In our variant,\n                  the input additionally contains a vertex-disjoint\n                  packing H of induced subgraphs of G, which provides\n                  a lower bound h(H) on the number of edge\n                  modifications required to transform G into an F-free\n                  graph.  While earlier works used the number k as\n                  parameter or structural parameters of the input\n                  graph G, we consider instead the parameter\n                  l:=k-h(H), that is, the number of edge modifications\n                  above the lower bound h(H).  We develop a framework\n                  of generic data reduction rules to show\n                  fixed-parameter tractability with respect to l for\n                  K_3-Free Editing, Feedback Arc Set in Tournaments,\n                  and Cluster Editing when the packing H contains\n                  subgraphs with bounded solution size.  
For K_3-Free\n                  Editing, we also prove NP-hardness in case of\n                  edge-disjoint packings of K_3s and l=0, while for\n                  K_q-Free Editing and qge 6, NP-hardness for l=0 even\n                  holds for vertex-disjoint packings of K_qs.  In\n                  addition, we provide NP-hardness results for F-free\n                  Vertex Deletion, were the aim is to delete a minimum\n                  number of vertices to make the input graph F-free.},\n  keywords =\t {algorithmic graph theory, graph modification,\n                  invited, network analysis, NP-hard, parameterized\n                  complexity},\n  volume =\t "62",\n  number =\t "3",\n  pages =\t "739--770",\n}\n\n
\n
\n\n\n
\n We study the parameterized complexity of a variant of the F-free Editing problem: Given a graph G and a natural number k, is it possible to modify at most k edges in G so that the resulting graph contains no induced subgraph isomorphic to F? In our variant, the input additionally contains a vertex-disjoint packing H of induced subgraphs of G, which provides a lower bound h(H) on the number of edge modifications required to transform G into an F-free graph. While earlier works used the number k as parameter or structural parameters of the input graph G, we consider instead the parameter l:=k-h(H), that is, the number of edge modifications above the lower bound h(H). We develop a framework of generic data reduction rules to show fixed-parameter tractability with respect to l for K_3-Free Editing, Feedback Arc Set in Tournaments, and Cluster Editing when the packing H contains subgraphs with bounded solution size. For K_3-Free Editing, we also prove NP-hardness in case of edge-disjoint packings of K_3s and l=0, while for K_q-Free Editing and q ≥ 6, NP-hardness for l=0 even holds for vertex-disjoint packings of K_qs. In addition, we provide NP-hardness results for F-free Vertex Deletion, where the aim is to delete a minimum number of vertices to make the input graph F-free.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n The parameterized complexity of finding secluded solutions to some classical optimization problems on graphs.\n \n \n \n \n\n\n \n van Bevern, R.; Fluschnik, T.; Mertzios, G. B.; Molter, H.; Sorge, M.; and Suchý, O.\n\n\n \n\n\n\n Discrete Optimization, 30: 20–50. 2018.\n \n\n\n\n
\n\n\n\n \n \n \"The preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BFM+18,\n  title =\t {The parameterized complexity of finding secluded\n                  solutions to some classical optimization problems on\n                  graphs},\n  author =\t {René van Bevern and Till Fluschnik and George\n                  B. Mertzios and Hendrik Molter and Manuel Sorge and\n                  Ondřej Suchý},\n  date =\t {2018-11-01},\n  year =\t 2018,\n  journal =\t {Discrete Optimzation},\n  volume =\t 30,\n  pages =\t {20--50},\n  url_Preprint = {https://arxiv.org/abs/1606.09000},\n  doi =\t\t {10.1016/j.disopt.2018.05.002},\n  abstract =\t {This work studies the parameterized complexity of\n                  finding secluded solutions to classical\n                  combinatorial optimization problems on graphs such\n                  as finding minimum s-t separators, feedback vertex\n                  sets, dominating sets, maximum independent sets, and\n                  vertex deletion problems for hereditary graph\n                  properties: Herein, one searches not only to\n                  minimize or maximize the size of the solution, but\n                  also to minimize the size of its neighborhood. This\n                  restriction has applications in secure routing and\n                  community detection.}\n}\n\n\n
\n
\n\n\n
\n This work studies the parameterized complexity of finding secluded solutions to classical combinatorial optimization problems on graphs such as finding minimum s-t separators, feedback vertex sets, dominating sets, maximum independent sets, and vertex deletion problems for hereditary graph properties: Herein, one searches not only to minimize or maximize the size of the solution, but also to minimize the size of its neighborhood. This restriction has applications in secure routing and community detection.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2017\n \n \n (6)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n A parameterized approximation algorithm for the mixed and windy Capacitated Arc Routing Problem: theory and experiments.\n \n \n \n \n\n\n \n van Bevern, R.; Komusiewicz, C.; and Sorge, M.\n\n\n \n\n\n\n Networks, 70(3): 262–278. 2017.\n \n\n\n\n
\n\n\n\n \n \n \"A preprint\n  \n \n \n \"A slides\n  \n \n \n \"A code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{BKS17,\n  title =\t {A parameterized approximation algorithm for the\n                  mixed and windy Capacitated Arc Routing Problem:\n                  theory and experiments},\n  author =\t {René van Bevern and Christian Komusiewicz and Manuel\n                  Sorge},\n  journal =\t {Networks},\n  volume =\t {70},\n  number =\t {3},\n  issn =\t {1097-0037},\n  doi =\t\t {10.1002/net.21742},\n  pages =\t {262--278},\n  keywords =\t {vehicle routing, transportation, Rural Postman,\n                  Chinese Postman, NP-hard problem, fixed-parameter\n                  algorithm, combinatorial optimization},\n  year =\t {2017},\n  url_Preprint = {https://arxiv.org/abs/1506.05620},\n  url_Slides =\n                  {http://rvb.su/pdf/approximation-directed-arc-routing-atmos15.pdf},\n  url_Code =\t {https://gitlab.com/rvb/mwcarp-approx},\n  date =\t {2017-10-01},\n  abstract =\t {We prove that any polynomial-time α(n)-approximation\n                  algorithm for the n-vertex metric asymmetric\n                  Traveling Salesperson Problem yields a\n                  polynomial-time O(α(C))-approximation algorithm for\n                  the mixed and windy Capacitated Arc Routing Problem,\n                  where C is the number of weakly connected components\n                  in the subgraph induced by the positive-demand\n                  arcs---a small number in many applications.  In\n                  conjunction with known results, we obtain\n                  constant-factor approximations for C∈O(log n) and\n                  O(log C/log log C)-approximations in general.\n                  Experiments show that our algorithm, together with\n                  several heuristic enhancements, outperforms many\n                  previous polynomial-time heuristics.  
Finally, since\n                  the solution quality achievable in polynomial time\n                  appears to mainly depend on C and since C=1 in\n                  almost all benchmark instances, we propose the Ob\n                  benchmark set, simulating cities that are divided\n                  into several components by a river.}\n}\n\n
\n
\n\n\n
\n We prove that any polynomial-time α(n)-approximation algorithm for the n-vertex metric asymmetric Traveling Salesperson Problem yields a polynomial-time O(α(C))-approximation algorithm for the mixed and windy Capacitated Arc Routing Problem, where C is the number of weakly connected components in the subgraph induced by the positive-demand arcs—a small number in many applications. In conjunction with known results, we obtain constant-factor approximations for C∈O(log n) and O(log C/log log C)-approximations in general. Experiments show that our algorithm, together with several heuristic enhancements, outperforms many previous polynomial-time heuristics. Finally, since the solution quality achievable in polynomial time appears to mainly depend on C and since C=1 in almost all benchmark instances, we propose the Ob benchmark set, simulating cities that are divided into several components by a river.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n A parameterized complexity view on non-preemptively scheduling interval-constrained jobs: few machines, small looseness, and small slack.\n \n \n \n \n\n\n \n van Bevern, R.; Niedermeier, R.; and Suchý, O.\n\n\n \n\n\n\n Journal of Scheduling, 20(3): 255–265. 2017.\n \n\n\n\n
\n\n\n\n \n \n \"A preprint\n  \n \n \n \"A slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{BNS17,\n  title =\t {A parameterized complexity view on non-preemptively\n                  scheduling interval-constrained jobs: few machines,\n                  small looseness, and small slack},\n  author =\t {René van Bevern and Rolf Niedermeier and Ondřej\n                  Suchý},\n  url_Preprint = {http://arxiv.org/abs/1508.01657},\n  doi =\t\t {10.1007/s10951-016-0478-9},\n  url_Slides =\t {http://rvb.su/pdf/looseness-slack-door16.pdf},\n  issn =\t {1094-6136},\n  year =\t 2017,\n  date =\t {2017-06-01},\n  journal =\t {Journal of Scheduling},\n  publisher =\t {Springer},\n  abstract =\t {We study the problem of non-preemptively scheduling\n                  n jobs, each job j with a release time t_j, a\n                  deadline d_j, and a processing time p_j, on m\n                  parallel identical machines.  Cieliebak et\n                  al. considered the two constraints |d_j-t_j| ≤ λp_j\n                  and |d_j-t_j| ≤ p_j+σ and showed the problem to be\n                  NP-hard for any λ>1 and for any σ≥2.  We complement\n                  their results by parameterized complexity studies:\n                  we show that, for any λ>1, the problem remains\n                  weakly NP-hard even for m=2 and strongly W[1]-hard\n                  parameterized by m.  We present a\n                  pseudo-polynomial-time algorithm for constant m and\n                  λ and a fixed-parameter tractability result for the\n                  parameter m combined with σ.},\n  volume =\t 20,\n  number =\t 3,\n  pages =\t {255–265}\n}\n\n
\n
\n\n\n
\n We study the problem of non-preemptively scheduling n jobs, each job j with a release time t_j, a deadline d_j, and a processing time p_j, on m parallel identical machines. Cieliebak et al. considered the two constraints |d_j-t_j| ≤ λp_j and |d_j-t_j| ≤ p_j+σ and showed the problem to be NP-hard for any λ>1 and for any σ≥2. We complement their results by parameterized complexity studies: we show that, for any λ>1, the problem remains weakly NP-hard even for m=2 and strongly W[1]-hard parameterized by m. We present a pseudo-polynomial-time algorithm for constant m and λ and a fixed-parameter tractability result for the parameter m combined with σ.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Finding Secluded Places of Special Interest in Graphs.\n \n \n \n\n\n \n van Bevern, R.; Fluschnik, T.; Mertzios, G. B.; Molter, H.; Sorge, M.; and Suchý, O.\n\n\n \n\n\n\n In Guo, J.; and Hermelin, D., editor(s), IPEC 2016, volume 63, of Leibniz International Proceedings in Informatics (LIPIcs), pages 5:1–5:16. Schloss Dagstuhl–Leibniz-Zentrum fuer Informatik, 2017.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@incollection{BFM+17,\n  author =\t {René van Bevern and Till Fluschnik and George\n                  B. Mertzios and Hendrik Molter and Manuel Sorge and\n                  Ondřej Suchý},\n  title =\t {Finding Secluded Places of Special Interest in\n                  Graphs},\n  date =\t {2017-02-10},\n  booktitle =\t {IPEC 2016},\n  editor =\t {Jiong Guo and Danny Hermelin},\n  abstract =\t {Finding a vertex subset in a graph that satisfies a\n                  certain property is one of the most-studied topics\n                  in algorithmic graph theory. The focus herein is\n                  often on minimizing or maximizing the size of the\n                  solution, that is, the size of the desired vertex\n                  set. In several applications, however, we also want\n                  to limit the "exposure" of the solution to the rest\n                  of the graph. This is the case, for example, when\n                  the solution represents persons that ought to deal\n                  with sensitive information or a segregated\n                  community. In this work, we thus explore the\n                  (parameterized) complexity of finding such secluded\n                  vertex subsets for a wide variety of properties that\n                  they shall fulfill. 
More precisely, we study the\n                  constraint that the (open or closed) neighborhood of\n                  the solution shall be bounded by a parameter and the\n                  influence of this constraint on the complexity of\n                  minimizing separators, feedback vertex sets, F-free\n                  vertex deletion sets, dominating sets, and the\n                  maximization of independent sets.},\n  pages =\t {5:1--5:16},\n  series =\t {Leibniz International Proceedings in Informatics\n                  (LIPIcs)},\n  ISBN =\t {978-3-95977-023-1},\n  ISSN =\t {1868-8969},\n  year =\t {2017},\n  volume =\t {63},\n  publisher =\t {Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik},\n  doi =\t\t {10.4230/LIPIcs.IPEC.2016.5},\n}\n\n
\n
\n\n\n
\n Finding a vertex subset in a graph that satisfies a certain property is one of the most-studied topics in algorithmic graph theory. The focus herein is often on minimizing or maximizing the size of the solution, that is, the size of the desired vertex set. In several applications, however, we also want to limit the \"exposure\" of the solution to the rest of the graph. This is the case, for example, when the solution represents persons that ought to deal with sensitive information or a segregated community. In this work, we thus explore the (parameterized) complexity of finding such secluded vertex subsets for a wide variety of properties that they shall fulfill. More precisely, we study the constraint that the (open or closed) neighborhood of the solution shall be bounded by a parameter and the influence of this constraint on the complexity of minimizing separators, feedback vertex sets, F-free vertex deletion sets, dominating sets, and the maximization of independent sets.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Fixed-parameter algorithms for DAG Partitioning.\n \n \n \n \n\n\n \n van Bevern, R.; Bredereck, R.; Chopin, M.; Hartung, S.; Hüffner, F.; Nichterlein, A.; and Suchý, O.\n\n\n \n\n\n\n Discrete Applied Mathematics, 220: 134–160. 2017.\n \n\n\n\n
\n\n\n\n \n \n \"Fixed-parameter preprint\n  \n \n \n \"Fixed-parameter code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BBC+17,\n  title =\t "Fixed-parameter algorithms for {DAG} Partitioning",\n  journal =\t "Discrete Applied Mathematics",\n  volume =\t 220,\n  pages =\t "134--160",\n  year =\t 2017,\n  issn =\t "0166-218X",\n  doi =\t\t "10.1016/j.dam.2016.12.002",\n  author =\t "René van Bevern and Robert Bredereck and Morgan\n                  Chopin and Sepp Hartung and Falk Hüffner and André\n                  Nichterlein and Ondřej Suchý",\n  keywords =\t "NP-hard problem",\n  keywords =\t "Graph algorithms",\n  keywords =\t "Polynomial-time data reduction",\n  keywords =\t "Multiway cut",\n  keywords =\t "Linear-time algorithms",\n  keywords =\t "Algorithm engineering",\n  keywords =\t "Evaluating heuristics ",\n  abstract =\t "Abstract Finding the origin of short phrases\n                  propagating through the web has been formalized by\n                  Leskovec et al. (2009) as \\{DAG\\} Partitioning:\n                  given an arc-weighted directed acyclic graph on n\n                   vertices and m  arcs, delete arcs with total weight\n                  at most  k such that each resulting weakly-connected\n                  component contains exactly one sink—a vertex without\n                  outgoing arcs. \\{DAG\\} Partitioning is NP-hard. We\n                  show an algorithm to solve \\{DAG\\} Partitioning in O\n                  ( 2 k ⋅ ( n + m ) )  time, that is, in linear time\n                  for fixed  k . We complement it with linear-time\n                  executable data reduction rules. Our experiments\n                  show that, in combination, they can optimally solve\n                  \\{DAG\\} Partitioning on simulated citation networks\n                  within five minutes for k ≤ 190 and m being  10 7\n                  and larger. We use our obtained optimal solutions to\n                  evaluate the solution quality of Leskovec et al.’s\n                  heuristic. 
We show that Leskovec et al.’s heuristic\n                  works optimally on trees and generalize this result\n                  by showing that \\{DAG\\} Partitioning is solvable in\n                  2 O ( t 2 ) ⋅ n time if a width- t tree\n                  decomposition of the input graph is given. Thus, we\n                  improve an algorithm and answer an open question of\n                  Alamdari and Mehrabian (2012). We complement our\n                  algorithms by lower bounds on the running time of\n                  exact algorithms and on the effectivity of data\n                  reduction. ",\n  url_Preprint = {https://arxiv.org/abs/1611.08809},\n  url_Code =\t {https://gitlab.com/rvb/dagpart/}\n}\n\n\n
\n
\n\n\n
\n Abstract Finding the origin of short phrases propagating through the web has been formalized by Leskovec et al. (2009) as \\DAG\\ Partitioning: given an arc-weighted directed acyclic graph on n  vertices and m  arcs, delete arcs with total weight at most  k such that each resulting weakly-connected component contains exactly one sink—a vertex without outgoing arcs. \\DAG\\ Partitioning is NP-hard. We show an algorithm to solve \\DAG\\ Partitioning in O ( 2 k ⋅ ( n + m ) )  time, that is, in linear time for fixed  k . We complement it with linear-time executable data reduction rules. Our experiments show that, in combination, they can optimally solve \\DAG\\ Partitioning on simulated citation networks within five minutes for k ≤ 190 and m being  10 7 and larger. We use our obtained optimal solutions to evaluate the solution quality of Leskovec et al.’s heuristic. We show that Leskovec et al.’s heuristic works optimally on trees and generalize this result by showing that \\DAG\\ Partitioning is solvable in 2 O ( t 2 ) ⋅ n time if a width- t tree decomposition of the input graph is given. Thus, we improve an algorithm and answer an open question of Alamdari and Mehrabian (2012). We complement our algorithms by lower bounds on the running time of exact algorithms and on the effectivity of data reduction. \n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Parameterized algorithms for power-efficient connected symmetric wireless sensor networks.\n \n \n \n \n\n\n \n Bentert, M.; van Bevern, R.; Nichterlein, A.; and Niedermeier, R.\n\n\n \n\n\n\n In Anta, A. F.; Jurdzinski, T.; Mosteiro, M. A.; and Zhang, Y., editor(s), ALGOSENSORS 2017, volume 10718, of Lecture Notes in Computer Science, pages 26–40. Springer, 2017.\n \n\n\n\n
\n\n\n\n \n \n \"Parameterized slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@incollection{BBNN17,\n  author =\t {{Bentert}, M. and R. van Bevern and {Nichterlein},\n                  A. and {Niedermeier}, R.},\n  title =\t {Parameterized algorithms for power-efficient\n                  connected symmetric wireless sensor networks},\n  booktitle =\t {ALGOSENSORS 2017},\n  editor =\t {Antonio Fernández Anta and Tomasz Jurdzinski and\n                  Miguel A. Mosteiro and Yanyong Zhang},\n  year =\t 2017,\n  series =\t {Lecture Notes in Computer Science},\n  volume =\t {10718},\n  pages =\t "26--40",\n  doi =\t\t "10.1007/978-3-319-72751-6_3",\n  url_Slides =\n                  {https://www.researchgate.net/publication/328262005_The_min-power_symmetric_connectivity_problem_with_few_obligatory_network_components},\n  publisher =\t {Springer},\n  date =\t {2017-12-31},\n  abstract =\t {We study an NP-hard problem motivated by\n                  energy-efficiently maintaining the connectivity of a\n                  symmetric wireless sensor communication\n                  network. Given an edge-weighted n-vertex graph, find\n                  a connected spanning subgraph of minimum cost, where\n                  the cost is determined by letting each vertex pay\n                  the most expensive edge incident to it in the\n                  subgraph. We provide an algorithm that works in\n                  polynomial time if one can find a set of obligatory\n                  edges that yield a spanning subgraph with O(logn)\n                  connected components. We also provide a linear-time\n                  algorithm that reduces any input graph that consists\n                  of a tree together with g additional edges to an\n                  equivalent graph with O(g) vertices. Based on this,\n                  we obtain a polynomial-time algorithm for\n                  g∈O(logn). 
On the negative side, we show that\n                  o(logn)-approximating the difference d between the\n                  optimal solution cost and a natural lower bound is\n                  NP-hard and that there are presumably no exact\n                  algorithms running in 2o(n) time or in f(d)⋅nO(1)\n                  time for any computable function f.}\n}\n\n\n
\n
\n\n\n
\n We study an NP-hard problem motivated by energy-efficiently maintaining the connectivity of a symmetric wireless sensor communication network. Given an edge-weighted n-vertex graph, find a connected spanning subgraph of minimum cost, where the cost is determined by letting each vertex pay the most expensive edge incident to it in the subgraph. We provide an algorithm that works in polynomial time if one can find a set of obligatory edges that yield a spanning subgraph with O(logn) connected components. We also provide a linear-time algorithm that reduces any input graph that consists of a tree together with g additional edges to an equivalent graph with O(g) vertices. Based on this, we obtain a polynomial-time algorithm for g∈O(logn). On the negative side, we show that o(logn)-approximating the difference d between the optimal solution cost and a natural lower bound is NP-hard and that there are presumably no exact algorithms running in 2o(n) time or in f(d)⋅nO(1) time for any computable function f.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Partitioning perfect graphs into stars.\n \n \n \n \n\n\n \n van Bevern, R.; Bredereck, R.; Bulteau, L.; Chen, J.; Froese, V.; Niedermeier, R.; and Woeginger, G. J.\n\n\n \n\n\n\n Journal of Graph Theory, 85(2): 297–335. 2017.\n \n\n\n\n
\n\n\n\n \n \n \"Partitioning preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BBB+17,\n  title =\t {Partitioning perfect graphs into stars},\n  author =\t {René van Bevern and Robert Bredereck and Laurent\n                  Bulteau and Jiehua Chen and Vincent Froese and Rolf\n                  Niedermeier and Gerhard J. Woeginger},\n  url_Preprint = {http://arxiv.org/abs/1402.2589},\n  doi =\t\t {10.1002/jgt.22062},\n  year =\t 2017,\n  date =\t {2017-06-01},\n  journal =\t {Journal of Graph Theory},\n  abstract =\t {The partition of graphs into nice subgraphs is a\n                  central algorithmic problem with strong ties to\n                  matching theory. We study the partitioning of\n                  undirected graphs into stars of the same size, a\n                  problem known to be NP-complete even for the case of\n                  stars on three vertices. We perform a thorough\n                  computational complexity study of the problem on\n                  subclasses of perfect graphs and identify several\n                  polynomial-time solvable cases, for example, on\n                  interval graphs and bipartite permutation graphs,\n                  and also NP-complete cases, for example, on grid\n                  graphs and chordal graphs.},\n  volume =\t 85,\n  number =\t 2,\n  pages =\t {297--335},\n  keywords =\t {algorithmic graph theory}\n}\n\n
\n
\n\n\n
\n The partition of graphs into nice subgraphs is a central algorithmic problem with strong ties to matching theory. We study the partitioning of undirected graphs into stars of the same size, a problem known to be NP-complete even for the case of stars on three vertices. We perform a thorough computational complexity study of the problem on subclasses of perfect graphs and identify several polynomial-time solvable cases, for example, on interval graphs and bipartite permutation graphs, and also NP-complete cases, for example, on grid graphs and chordal graphs.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2016\n \n \n (7)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Completing partial schedules for Open Shop with unit processing times and routing.\n \n \n \n \n\n\n \n van Bevern, R.; and Pyatkin, A. V.\n\n\n \n\n\n\n In Kulikov, A. S.; and Woeginger, G. J., editor(s), CSR 2016, volume 9691, of Lecture Notes in Computer Science, pages 73-87. Springer, 2016.\n \n\n\n\n
\n\n\n\n \n \n \"Completing slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BP16,\n  title =\t {Completing partial schedules for Open Shop with unit\n                  processing times and routing},\n  doi =\t\t {10.1007/978-3-319-34171-2_6},\n  author =\t {René van Bevern and Artem V. Pyatkin},\n  url_Slides =\t {http://rvb.su/pdf/routing-open-shop-csr16.pdf},\n  isbn =\t {978-3-319-34170-5},\n  year =\t {2016},\n  date =\t {2016-05-31},\n  booktitle =\t {CSR 2016},\n  editor =\t {Alexander S. Kulikov and Gerhard J. Woeginger},\n  volume =\t {9691},\n  pages =\t {73-87},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {Open Shop is a classical scheduling problem: given a\n                  set J of jobs and a set M of machines, find a\n                  minimum-makespan schedule to process each job J_i∈J\n                  on each machine M_q∈M for a given amount p_iq of\n                  time such that each machine processes only one job\n                  at a time and each job is processed by only one\n                  machine at a time. In Routing Open Shop, the jobs\n                  are located in the vertices of an edge-weighted\n                  graph G=(V,E) whose edge weights determine the time\n                  needed for the machines to travel between\n                  jobs. Routing Open Shop is NP-hard for\n                  |V|=|M|=2. For the special case of unit processing\n                  times p_iq=1, we exploit Galvin's theorem about\n                  list-coloring edges of bipartite graphs to prove a\n                  theorem that gives a sufficient condition for the\n                  completability of partial schedules. 
Exploiting this\n                  schedule completion theorem and integer linear\n                  programming, we show that Routing Open Shop with\n                  unit processing times is solvable in 2^O(|V||M|^2\n                  log |V||M|) poly(|J|)) time, that is,\n                  fixed-parameter tractable parameterized by\n                  |V|+|M|. Various upper bounds shown using the\n                  schedule completion theorem suggest it to be\n                  likewise beneficial for the development of\n                  approximation algorithms.},\n  keywords =\t {NP-hard, parameterized complexity, routing,\n                  scheduling}\n}\n\n\n
\n
\n\n\n
\n Open Shop is a classical scheduling problem: given a set J of jobs and a set M of machines, find a minimum-makespan schedule to process each job J_i∈J on each machine M_q∈M for a given amount p_iq of time such that each machine processes only one job at a time and each job is processed by only one machine at a time. In Routing Open Shop, the jobs are located in the vertices of an edge-weighted graph G=(V,E) whose edge weights determine the time needed for the machines to travel between jobs. Routing Open Shop is NP-hard for |V|=|M|=2. For the special case of unit processing times p_iq=1, we exploit Galvin's theorem about list-coloring edges of bipartite graphs to prove a theorem that gives a sufficient condition for the completability of partial schedules. Exploiting this schedule completion theorem and integer linear programming, we show that Routing Open Shop with unit processing times is solvable in 2^O(|V||M|^2 log |V||M|) poly(|J|)) time, that is, fixed-parameter tractable parameterized by |V|+|M|. Various upper bounds shown using the schedule completion theorem suggest it to be likewise beneficial for the development of approximation algorithms.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Exploiting hidden structure in selecting dimensions that distinguish vectors.\n \n \n \n \n\n\n \n Froese, V.; van Bevern, R.; Niedermeier, R.; and Sorge, M.\n\n\n \n\n\n\n Journal of Computer and System Sciences, 82(3): 521–535. 2016.\n \n\n\n\n
\n\n\n\n \n \n preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@article{FBNS16,\n  title =\t {Exploiting hidden structure in selecting dimensions\n                  that distinguish vectors},\n  author =\t {Vincent Froese and René van Bevern and Rolf\n                  Niedermeier and Manuel Sorge},\n  url_Preprint = {http://arxiv.org/abs/1512.01150},\n  doi =\t\t {10.1016/j.jcss.2015.11.011},\n  issn =\t {0022-0000},\n  year =\t 2016,\n  date =\t {2016-05-01},\n  journal =\t {Journal of Computer and System Sciences},\n  volume =\t 82,\n  number =\t 3,\n  pages =\t {521--535},\n  abstract =\t {The NP-hard Distinct Vectors problem asks to delete\n                  as many columns as possible from a matrix such that\n                  all rows in the resulting matrix are still pairwise\n                  distinct. Our main result is that, for binary\n                  matrices, there is a complexity dichotomy for\n                  Distinct Vectors based on the maximum (H) and the\n                  minimum (h) pairwise Hamming distance between matrix\n                  rows: Distinct Vectors can be solved in polynomial\n                  time if H≤2⌈h/2⌉+1, and is NP-complete\n                  otherwise. Moreover, we explore connections of\n                  Distinct Vectors to hitting sets, thereby providing\n                  several fixed-parameter tractability and\n                  intractability results also for general matrices.}\n}\n\n
\n
\n\n\n
\n The NP-hard Distinct Vectors problem asks to delete as many columns as possible from a matrix such that all rows in the resulting matrix are still pairwise distinct. Our main result is that, for binary matrices, there is a complexity dichotomy for Distinct Vectors based on the maximum (H) and the minimum (h) pairwise Hamming distance between matrix rows: Distinct Vectors can be solved in polynomial time if H≤2⌈h/2⌉+1, and is NP-complete otherwise. Moreover, we explore connections of Distinct Vectors to hitting sets, thereby providing several fixed-parameter tractability and intractability results also for general matrices.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n H-Index Manipulation by Merging Articles: Models, Theory, and Experiments.\n \n \n \n \n\n\n \n van Bevern, R.; Komusiewicz, C.; Niedermeier, R.; Sorge, M.; and Walsh, T.\n\n\n \n\n\n\n Artificial Intelligence, 240: 19–35. 2016.\n \n\n\n\n
\n\n\n\n \n \n \"H-Index preprint\n  \n \n \n \"H-Index code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@article{BKN+16,\n  title =\t {H-Index Manipulation by Merging Articles: Models,\n                  Theory, and Experiments},\n  author =\t {René van Bevern and Christian Komusiewicz and Rolf\n                  Niedermeier and Manuel Sorge and Toby Walsh},\n  url_Preprint = {http://arxiv.org/abs/1412.5498},\n  url_Code =\t {http://fpt.akt.tu-berlin.de/hindex/},\n  year =\t 2016,\n  date =\t {2016-11-01},\n  journal =\t {Artificial Intelligence},\n  volume =\t 240,\n  pages =\t {19--35},\n  abstract =\t {An author's profile on Google Scholar consists of\n                  indexed articles and associated data, such as the\n                  number of citations and the H-index. The author is\n                  allowed to merge articles, which may affect the\n                  H-index. We analyze the (parameterized)\n                  computational complexity of maximizing the H-index\n                  using article merges. Herein, to model realistic\n                  manipulation scenarios, we define a compatibility\n                  graph whose edges correspond to plausible\n                  merges. Moreover, we consider several different\n                  measures for computing the citation count of a\n                  merged article. For the measure used by Google\n                  Scholar, we give an algorithm that maximizes the\n                  H-index in linear time if the compatibility graph\n                  has constant-size connected components. In contrast,\n                  if we allow to merge arbitrary articles (that is,\n                  for arbitrary compatibility graphs), then already\n                  increasing the H-index by one is\n                  NP-hard. 
Experiments on Google Scholar profiles of\n                  AI researchers show that the H-index can be\n                  manipulated substantially only if one merges\n                  articles with highly dissimilar titles.},\n  doi =\t\t {10.1016/j.artint.2016.08.001},\n  keywords =\t {network analysis, parameterized complexity}\n}\n\n
\n
\n\n\n
\n An author's profile on Google Scholar consists of indexed articles and associated data, such as the number of citations and the H-index. The author is allowed to merge articles, which may affect the H-index. We analyze the (parameterized) computational complexity of maximizing the H-index using article merges. Herein, to model realistic manipulation scenarios, we define a compatibility graph whose edges correspond to plausible merges. Moreover, we consider several different measures for computing the citation count of a merged article. For the measure used by Google Scholar, we give an algorithm that maximizes the H-index in linear time if the compatibility graph has constant-size connected components. In contrast, if we allow to merge arbitrary articles (that is, for arbitrary compatibility graphs), then already increasing the H-index by one is NP-hard. Experiments on Google Scholar profiles of AI researchers show that the H-index can be manipulated substantially only if one merges articles with highly dissimilar titles.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n H-Index Manipulation by Undoing Merges.\n \n \n \n\n\n \n van Bevern, R.; Komusiewicz, C.; Molter, H.; Niedermeier, R.; Sorge, M.; and Walsh, T.\n\n\n \n\n\n\n In Kaminka, G. A.; Fox, M.; Bouquet, P.; Hüllermeier, E.; Dignum, V.; Dignum, F.; and van Harmelen, F., editor(s), ECAI 2016, volume 285, of Frontiers in Artificial Intelligence and Applications, pages 895–903. IOS Press, 2016.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BKM+16,\n  title =\t {H-Index Manipulation by Undoing Merges},\n  author =\t {René van Bevern and Christian Komusiewicz and\n                  Hendrik Molter and Rolf Niedermeier and Manuel Sorge\n                  and Toby Walsh},\n  doi =\t\t {10.3233/978-1-61499-672-9-895},\n  year =\t {2016},\n  date =\t {2016-08-29},\n  booktitle =\t {ECAI 2016},\n  editor =\t {Gal A. Kaminka and Maria Fox and Paolo Bouquet and\n                  Eyke Hüllermeier and Virginia Dignum and Frank\n                  Dignum and Frank van Harmelen},\n  publisher =\t {IOS Press},\n  abstract =\t {The h-index is one of the most important\n                  bibliographic measures used to assess the\n                  performance of researchers. Van Bevern et\n                  al. [Artif. Intel., in press] showed that, despite\n                  computational worst-case hardness results,\n                  substantial manipulation of the h-index of Google\n                  Scholar author profiles is possible by merging\n                  articles. Complementing previous work, we study the\n                  opposite operation, the splitting of articles, which\n                  is arguably the more natural operation for\n                  manipulation and which is also allowed within Google\n                  Scholar. We present numerous results on\n                  computational complexity (from linear-time\n                  algorithms to parameterized hardness results) and\n                  empirically indicate that at least small\n                  improvements of the h-index by splitting merged\n                  articles are easily achievable.},\n  volume =\t {285},\n  pages =\t {895--903},\n  series =\t {Frontiers in Artificial Intelligence and\n                  Applications},\n  keywords =\t {graph modification, NP-hard, parameterized\n                  complexity}\n}\n\n\n
\n
\n\n\n
\n The h-index is one of the most important bibliographic measures used to assess the performance of researchers. Van Bevern et al. [Artif. Intel., in press] showed that, despite computational worst-case hardness results, substantial manipulation of the h-index of Google Scholar author profiles is possible by merging articles. Complementing previous work, we study the opposite operation, the splitting of articles, which is arguably the more natural operation for manipulation and which is also allowed within Google Scholar. We present numerous results on computational complexity (from linear-time algorithms to parameterized hardness results) and empirically indicate that at least small improvements of the h-index by splitting merged articles are easily achievable.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Parameterizing edge modification problems above lower bounds.\n \n \n \n \n\n\n \n van Bevern, R.; Froese, V.; and Komusiewicz, C.\n\n\n \n\n\n\n In Kulikov, A. S.; and Woeginger, G. J., editor(s), CSR 2016, volume 9691, of Lecture Notes in Computer Science, pages 57-72. Springer, 2016.\n \n\n\n\n
\n\n\n\n \n \n \"Parameterizing slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BFK16,\n  title =\t {Parameterizing edge modification problems above\n                  lower bounds},\n  doi =\t\t {10.1007/978-3-319-34171-2_5},\n  author =\t {René van Bevern and Vincent Froese and Christian\n                  Komusiewicz},\n  url_Slides =\t {http://rvb.su/pdf/above-guarantee-editing-csr16.pdf},\n  isbn =\t {978-3-319-34170-5},\n  year =\t {2016},\n  date =\t {2016-05-31},\n  booktitle =\t {CSR 2016},\n  editor =\t {Alexander S. Kulikov and Gerhard J. Woeginger},\n  volume =\t {9691},\n  pages =\t {57-72},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {For a fixed graph F, we study the parameterized\n                  complexity of a variant of the F-Free Editing\n                  problem: Given a graph G and a natural number k, is\n                  it possible to modify at most k edges in G so that\n                  the resulting graph contains no induced subgraph\n                  isomorphic to F? In our variant, the input\n                  additionally contains a vertex-disjoint packing H of\n                  induced subgraphs of G, which provides a lower bound\n                  h(H) on the number of edge modifications required to\n                  transform G into an F-free graph. While earlier\n                  works used the number k as parameter or structural\n                  parameters of the input graph G, we consider the\n                  parameter l:=k-h(H) instead, that is, the number of\n                  edge modifications above the lower bound h(H). We\n                  show fixed-parameter tractability with respect to l\n                  for K_3-Free Editing, Feedback Arc Set in\n                  Tournaments, and Cluster Editing when the packing H\n                  contains subgraphs with bounded modification\n                  cost. 
For K3-Free Editing, we also prove NP-hardness\n                  in case of edge-disjoint packings of K3s and l=0,\n                  while for Kq-Free Editing and q≥6, NP-hardness for\n                  l=0 even holds for vertex-disjoint packings of Kq.},\n  keywords =\t {graph modification, parameterized complexity},\n}\n\n
\n
\n\n\n
\n For a fixed graph F, we study the parameterized complexity of a variant of the F-Free Editing problem: Given a graph G and a natural number k, is it possible to modify at most k edges in G so that the resulting graph contains no induced subgraph isomorphic to F? In our variant, the input additionally contains a vertex-disjoint packing H of induced subgraphs of G, which provides a lower bound h(H) on the number of edge modifications required to transform G into an F-free graph. While earlier works used the number k as parameter or structural parameters of the input graph G, we consider the parameter l:=k-h(H) instead, that is, the number of edge modifications above the lower bound h(H). We show fixed-parameter tractability with respect to l for K_3-Free Editing, Feedback Arc Set in Tournaments, and Cluster Editing when the packing H contains subgraphs with bounded modification cost. For K3-Free Editing, we also prove NP-hardness in case of edge-disjoint packings of K3s and l=0, while for Kq-Free Editing and q≥6, NP-hardness for l=0 even holds for vertex-disjoint packings of Kq.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Precedence-constrained scheduling problems parameterized by partial order width.\n \n \n \n \n\n\n \n van Bevern, R.; Bredereck, R.; Bulteau, L.; Komusiewicz, C.; Talmon, N.; and Woeginger, G. J.\n\n\n \n\n\n\n In Kochetov, Y.; Khachay, M.; Beresnev, V.; Nurminski, E.; and Pardalos, P., editor(s), DOOR 2016, volume 9869, of Lecture Notes in Computer Science, pages 105-120. Springer, 2016.\n \n\n\n\n
\n\n\n\n \n \n \"Precedence-constrained preprint\n  \n \n \n \"Precedence-constrained slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BBB+16,\n  title =\t {Precedence-constrained scheduling problems\n                  parameterized by partial order width},\n  author =\t {René van Bevern and Robert Bredereck and Laurent\n                  Bulteau and Christian Komusiewicz and Nimrod Talmon\n                  and Gerhard J. Woeginger},\n  url_Preprint = {http://arxiv.org/abs/1605.00901},\n  doi =\t\t {10.1007/978-3-319-44914-2_9},\n  url_Slides =\t {http://rvb.su/pdf/p-prec-cmax-door16.pdf},\n  year =\t {2016},\n  date =\t {2016-09-19},\n  booktitle =\t {DOOR 2016},\n  editor =\t {Yury Kochetov and Michael Khachay and Vladimir\n                  Beresnev and Evgeni Nurminski and Panos Pardalos},\n  publisher =\t {Springer},\n  abstract =\t {Negatively answering a question posed by Mnich and\n                  Wiese (Math. Program. 154(1-2):533-562), we show\n                  that P2|prec,pj∈{1,2}|Cmax, the problem of finding a\n                  non-preemptive minimum-makespan schedule for\n                  precedence-constrained jobs of lengths 1 and 2 on\n                  two parallel identical machines, is W[2]-hard\n                  parameterized by the width of the partial order\n                  giving the precedence constraints. To this end, we\n                  show that Shuffle Product, the problem of deciding\n                  whether a given word can be obtained by interleaving\n                  the letters of k other given words, is W[2]-hard\n                  parameterized by k, thus additionally answering a\n                  question posed by Rizzi and Vialette (CSR\n                  2013). Finally, refining a geometric algorithm due\n                  to Servakh\n                  (Diskretn. Anal. Issled. Oper. 
7(1):75-82), we show\n                  that the more general Resource-Constrained Project\n                  Scheduling problem is fixed-parameter tractable\n                  parameterized by the partial order width combined\n                  with the maximum allowed difference between the\n                  earliest possible and factual starting time of a\n                  job. },\n  pages =\t {105-120},\n  volume =\t {9869},\n  series =\t {Lecture Notes in Computer Science},\n  keywords =\t {NP-hard, parameterized complexity, scheduling}\n}\n\n
\n
\n\n\n
\n Negatively answering a question posed by Mnich and Wiese (Math. Program. 154(1-2):533-562), we show that P2|prec,pj∈1,2|Cmax, the problem of finding a non-preemptive minimum-makespan schedule for precedence-constrained jobs of lengths 1 and 2 on two parallel identical machines, is W[2]-hard parameterized by the width of the partial order giving the precedence constraints. To this end, we show that Shuffle Product, the problem of deciding whether a given word can be obtained by interleaving the letters of k other given words, is W[2]-hard parameterized by k, thus additionally answering a question posed by Rizzi and Vialette (CSR 2013). Finally, refining a geometric algorithm due to Servakh (Diskretn. Anal. Issled. Oper. 7(1):75-82), we show that the more general Resource-Constrained Project Scheduling problem is fixed-parameter tractable parameterized by the partial order width combined with the maximum allowed difference between the earliest possible and factual starting time of a job. \n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Twins in Subdivision Drawings of Hypergraphs.\n \n \n \n \n\n\n \n van Bevern, R.; Kanj, I.; Komusiewicz, C.; Niedermeier, R.; and Sorge, M.\n\n\n \n\n\n\n In Hu, Y.; and Nöllenburg, M., editor(s), GD 2016, volume 9801, of Lecture Notes in Computer Science, pages 67–80. Springer, 2016.\n \n\n\n\n
\n\n\n\n \n \n \"Twins preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BKK+16,\n  title =\t {Twins in Subdivision Drawings of Hypergraphs},\n  author =\t {René van Bevern and Iyad Kanj and Christian\n                  Komusiewicz and Rolf Niedermeier and Manuel Sorge},\n  year =\t 2016,\n  date =\t {2016-12-08},\n  booktitle =\t {GD 2016},\n  editor =\t {Yifan Hu and Martin Nöllenburg},\n  volume =\t {9801},\n  series =\t {Lecture Notes in Computer Science},\n  publisher =\t {Springer},\n  url_Preprint = {http://arxiv.org/abs/1511.09389v4},\n  abstract =\t {Visualizing hypergraphs, systems of subsets of some\n                  universe, has continuously attracted research\n                  interest in the last decades. We study a natural\n                  kind of hypergraph visualization called subdivision\n                  drawings.  Dinkla et al. [Comput. Graph. Forum ’12]\n                  claimed that only few hypergraphs have a subdivision\n                  drawing. However, this statement seems to be based\n                  on the assumption (also used in previous work) that\n                  the input hypergraph does not contain twins, pairs\n                  of vertices which are in precisely the same\n                  hyperedges (subsets of the universe). We show that\n                  such vertices may be necessary for a hypergraph to\n                  admit a subdivision drawing. As a counterpart, we\n                  show that the number of such “necessary twins” is\n                  upper-bounded by a function of the number m of\n                  hyperedges and a further parameter r of the desired\n                  drawing related to its number of layers. 
This leads\n                  to a linear-time algorithm for determining such\n                  subdivision draw- ings if m and r are constant; in\n                  other words, the problem is linear-time\n                  fixed-parameter tractable with respect to the\n                  parameters m and r.},\n  keywords =\t {algorithmic graph theory, graph modification},\n  pages =\t "67--80",\n  isbn =\t "978-3-319-50106-2",\n  doi =\t\t "10.1007/978-3-319-50106-2_6",\n}\n\n
\n
\n\n\n
\n Visualizing hypergraphs, systems of subsets of some universe, has continuously attracted research interest in the last decades. We study a natural kind of hypergraph visualization called subdivision drawings. Dinkla et al. [Comput. Graph. Forum ’12] claimed that only few hypergraphs have a subdivision drawing. However, this statement seems to be based on the assumption (also used in previous work) that the input hypergraph does not contain twins, pairs of vertices which are in precisely the same hyperedges (subsets of the universe). We show that such vertices may be necessary for a hypergraph to admit a subdivision drawing. As a counterpart, we show that the number of such “necessary twins” is upper-bounded by a function of the number m of hyperedges and a further parameter r of the desired drawing related to its number of layers. This leads to a linear-time algorithm for determining such subdivision draw- ings if m and r are constant; in other words, the problem is linear-time fixed-parameter tractable with respect to the parameters m and r.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2015\n \n \n (8)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Approximability and parameterized complexity of multicover by $c$-intervals.\n \n \n \n \n\n\n \n van Bevern, R.; Chen, J.; Hüffner, F.; Kratsch, S.; Talmon, N.; and Woeginger, G. J.\n\n\n \n\n\n\n Information Processing Letters, 115(10): 744–749. 2015.\n \n\n\n\n
\n\n\n\n \n \n \"Approximability preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BCH+15,\n  title =\t {Approximability and parameterized complexity of\n                  multicover by $c$-intervals},\n  author =\t {René van Bevern and Jiehua Chen and Falk Hüffner and\n                  Stefan Kratsch and Nimrod Talmon and Gerhard\n                  J. Woeginger},\n  url_Preprint =\n                  {https://fpt.akt.tu-berlin.de/publications/c-interval-cover-IPL.pdf},\n  doi =\t\t {10.1016/j.ipl.2015.03.004},\n  issn =\t {0020-0190},\n  year =\t 2015,\n  date =\t {2015-10-01},\n  journal =\t {Information Processing Letters},\n  volume =\t 115,\n  number =\t 10,\n  pages =\t {744--749},\n  abstract =\t {A c-interval is the disjoint union of c intervals over\n                  N. The c-Interval Multicover problem is the special\n                  case of Set Multicover where all sets available for\n                  covering are c-intervals. We strengthen known\n                  APX-hardness results for c-Interval Multicover, show\n                  W[1]-hardness when parameterized by the solution size,\n                  and present fixed-parameter algorithms for alternative\n                  parameterizations.},\n  keywords =\t {scheduling}\n}\n\n
\n
\n\n\n
\n A c-interval is the disjoint union of c intervals over N. The c-Interval Multicover problem is the special case of Set Multicover where all sets available for covering are c-intervals. We strengthen known APX-hardness results for c-Interval Multicover, show W[1]-hardness when parameterized by the solution size, and present fixed-parameter algorithms for alternative parameterizations.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Approximation Algorithms for Mixed, Windy, and Capacitated Arc Routing Problems.\n \n \n \n \n\n\n \n van Bevern, R.; Komusiewicz, C.; and Sorge, M.\n\n\n \n\n\n\n In Italiano, G. F.; and Schmidt, M., editor(s), ATMOS 2015, volume 48, of OpenAccess Series in Informatics (OASIcs), pages 130–143. Schloss Dagstuhl–Leibniz-Zentrum fuer Informatik, 2015.\n \n\n\n\n
\n\n\n\n \n \n \"Approximation slides\n  \n \n \n \"Approximation code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BKS15,\n  title =\t {Approximation Algorithms for Mixed, Windy, and\n                  Capacitated Arc Routing Problems},\n  doi =\t\t {10.4230/OASIcs.ATMOS.2015.130},\n  author =\t {René van Bevern and Christian Komusiewicz and Manuel\n                  Sorge},\n  url_Slides =\n                  {http://rvb.su/pdf/approximation-directed-arc-routing-atmos15.pdf},\n  url_Code =\t {https://gitlab.com/rvb/mwcarp-approx},\n  issn =\t {2190-6807},\n  year =\t {2015},\n  date =\t {2015-09-17},\n  booktitle =\t {ATMOS 2015},\n  editor =\t {Giuseppe F. Italiano and Marie Schmidt},\n  volume =\t {48},\n  pages =\t {130--143},\n  publisher =\t {Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik},\n  series =\t {OpenAccess Series in Informatics (OASIcs)},\n  abstract =\t {We show that any alpha(n)-approximation algorithm\n                  for the n-vertex metric asymmetric Traveling\n                  Salesperson problem yields O(alpha(C))-approximation\n                  algorithms for various mixed, windy, and capacitated\n                  arc routing problems. Herein, C is the number of\n                  weakly-connected components in the subgraph induced\n                  by the positive-demand arcs, a number that can be\n                  expected to be small in applications. In conjunction\n                  with known results, we derive constant-factor\n                  approximations if C is in O(log n) and\n                  O(log(C)/log(log(C)))-approximations in general.},\n  keywords =\t {routing},\n  url_Code =\t {http://gitlab.com/rvb/mwcarp-approx/},\n}\n\n
\n
\n\n\n
\n We show that any alpha(n)-approximation algorithm for the n-vertex metric asymmetric Traveling Salesperson problem yields O(alpha(C))-approximation algorithms for various mixed, windy, and capacitated arc routing problems. Herein, C is the number of weakly-connected components in the subgraph induced by the positive-demand arcs, a number that can be expected to be small in applications. In conjunction with known results, we derive constant-factor approximations if C is in O(log n) and O(log(C)/log(log(C)))-approximations in general.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Complexity of arc routing problems.\n \n \n \n\n\n \n van Bevern, R.; Niedermeier, R.; Sorge, M.; and Weller, M.\n\n\n \n\n\n\n In Corberán, Á.; and Laporte, G., editor(s), Arc Routing, volume 20, of MOS-SIAM Series on Optimization, 2, pages 19–52. SIAM, February 2015.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@InCollection{BNSW15,\n  author =\t {René van Bevern and Rolf Niedermeier and Manuel\n                  Sorge and Mathias Weller},\n  booktitle =\t {Arc Routing},\n  publisher =\t {SIAM},\n  title =\t {Complexity of arc routing problems},\n  year =\t {2015},\n  chapter =\t {2},\n  editor =\t {{\\'{A}}ngel Corberán and Gilbert Laporte},\n  isbn =\t {978-1-61197-366-2},\n  month =\t feb,\n  pages =\t {19--52},\n  series =\t {MOS-SIAM Series on Optimization},\n  type =\t {chapter},\n  volume =\t {20},\n  doi =\t\t {10.1137/1.9781611973679.ch2},\n  file =\t {BNSW14.pdf:BNSW14.pdf:PDF},\n  groups =\t {hs-survey grant},\n  keywords =\t {routing},\n  timestamp =\t {2020-05-19},\n}\n\n\n\n
\n
\n\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n H-Index Manipulation by Merging Articles: Models, Theory, and Experiments.\n \n \n \n \n\n\n \n van Bevern, R.; Komusiewicz, C.; Niedermeier, R.; Sorge, M.; and Walsh, T.\n\n\n \n\n\n\n In Yang, Q.; and Wooldridge, M. J., editor(s), IJCAI 2015, pages 808–814. AAAI Press, 2015.\n \n\n\n\n
\n\n\n\n \n \n \"H-IndexPaper\n  \n \n\n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BKN+15,\n  title =\t {H-Index Manipulation by Merging Articles: Models,\n                  Theory, and Experiments},\n  url =\t\t {http://ijcai.org/Abstract/15/119},\n  author =\t {René van Bevern and Christian Komusiewicz and Rolf\n                  Niedermeier and Manuel Sorge and Toby Walsh},\n  isbn =\t {978-1-57735-738-4},\n  year =\t {2015},\n  date =\t {2015-07-01},\n  editor =\t {Qiang Yang and Michael J. Wooldridge},\n  booktitle =\t {IJCAI 2015},\n  pages =\t {808--814},\n  publisher =\t {AAAI Press},\n  abstract =\t {An author's profile on Google Scholar consists of\n                  indexed articles and associated data, such as the\n                  number of citations and the H-index. The author is\n                  allowed to merge articles, which may affect the\n                  H-index. We analyze the parameterized complexity of\n                  maximizing the H-index using article merges. Herein,\n                  to model realistic manipulation scenarios, we define\n                  a compatability graph whose edges correspond to\n                  plausible merges. Moreover, we consider multiple\n                  possible measures for computing the citation count\n                  of a merged article. For the measure used by Google\n                  Scholar, we give an algorithm that maximizes the\n                  H-index in linear time if the compatibility graph\n                  has constant-size connected components. In contrast,\n                  if we allow to merge arbitrary articles, then\n                  already increasing the H-index by one is\n                  NP-hard. Experiments on Google Scholar profiles of\n                  AI researchers show that the H-index can be\n                  manipulated substantially only by merging articles\n                  with highly dissimilar titles, which would be easy\n                  to discover.},\n  keywords =\t {network analysis},\n}\n\n
\n
\n\n\n
\n An author's profile on Google Scholar consists of indexed articles and associated data, such as the number of citations and the H-index. The author is allowed to merge articles, which may affect the H-index. We analyze the parameterized complexity of maximizing the H-index using article merges. Herein, to model realistic manipulation scenarios, we define a compatability graph whose edges correspond to plausible merges. Moreover, we consider multiple possible measures for computing the citation count of a merged article. For the measure used by Google Scholar, we give an algorithm that maximizes the H-index in linear time if the compatibility graph has constant-size connected components. In contrast, if we allow to merge arbitrary articles, then already increasing the H-index by one is NP-hard. Experiments on Google Scholar profiles of AI researchers show that the H-index can be manipulated substantially only by merging articles with highly dissimilar titles, which would be easy to discover.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Interval scheduling and colorful independent sets.\n \n \n \n \n\n\n \n van Bevern, R.; Mnich, M.; Niedermeier, R.; and Weller, M.\n\n\n \n\n\n\n Journal of Scheduling, 18(5): 449-469. 2015.\n \n\n\n\n
\n\n\n\n \n \n \"Interval preprint\n  \n \n \n \"Interval code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BMNW15,\n  title =\t {Interval scheduling and colorful independent sets},\n  author =\t {René van Bevern and Matthias Mnich and Rolf\n                  Niedermeier and Mathias Weller},\n  url_Preprint = {http://arxiv.org/abs/1402.0851},\n  doi =\t\t {10.1007/s10951-014-0398-5},\n  url_Code =\t {http://fpt.akt.tu-berlin.de/cis},\n  issn =\t {1094-6136},\n  year =\t 2015,\n  date =\t {2015-10-01},\n  journal =\t {Journal of Scheduling},\n  volume =\t 18,\n  number =\t 5,\n  pages =\t {449-469},\n  publisher =\t {Springer},\n  abstract =\t {Numerous applications in scheduling, such as\n                  resource allocation or steel manufacturing, can be\n                  modeled using the NP-hard Independent Set problem\n                  (given an undirected graph and an integer k, find a\n                  set of at least k pairwise non-adjacent\n                  vertices). Here, one encounters special graph\n                  classes like 2-union graphs (edge-wise unions of two\n                  interval graphs) and strip graphs (edge-wise unions\n                  of an interval graph and a cluster graph), on which\n                  Independent Set remains NP-hard but admits constant\n                  ratio approximations in polynomial time. We study\n                  the parameterized complexity of Independent Set on\n                  2-union graphs and on subclasses like strip\n                  graphs. Our investigations significantly benefit\n                  from a new structural “compactness” parameter of\n                  interval graphs and novel problem formulations using\n                  vertex-colored interval graphs. Our main\n                  contributions are as follows: 1. 
We show a\n                  complexity dichotomy: restricted to graph classes\n                  closed under induced subgraphs and disjoint unions,\n                  Independent Set is polynomial-time solvable if both\n                  input interval graphs are cluster graphs, and is\n                  NP-hard otherwise. 2. We chart the possibilities and\n                  limits of effective polynomial-time preprocessing\n                  (also known as kernelization). 3. We extend\n                  Halldórsson and Karlsson (2006)’s fixed-parameter\n                  algorithm for Independent Set on strip graphs\n                  parameterized by the structural parameter “maximum\n                  number of live jobs” to show that the problem (also\n                  known as Job Interval Selection) is fixed-parameter\n                  tractable with respect to the parameter k and\n                  generalize their algorithm from strip graphs to\n                  2-union graphs. Preliminary experiments with random\n                  data indicate that Job Interval Selection with up to\n                  15 jobs and 5⋅10^5 intervals can be solved optimally\n                  in less than 5 min.},\n  keywords =\t {scheduling}\n}\n\n
\n
\n\n\n
\n Numerous applications in scheduling, such as resource allocation or steel manufacturing, can be modeled using the NP-hard Independent Set problem (given an undirected graph and an integer k, find a set of at least k pairwise non-adjacent vertices). Here, one encounters special graph classes like 2-union graphs (edge-wise unions of two interval graphs) and strip graphs (edge-wise unions of an interval graph and a cluster graph), on which Independent Set remains NP-hard but admits constant ratio approximations in polynomial time. We study the parameterized complexity of Independent Set on 2-union graphs and on subclasses like strip graphs. Our investigations significantly benefit from a new structural “compactness” parameter of interval graphs and novel problem formulations using vertex-colored interval graphs. Our main contributions are as follows: 1. We show a complexity dichotomy: restricted to graph classes closed under induced subgraphs and disjoint unions, Independent Set is polynomial-time solvable if both input interval graphs are cluster graphs, and is NP-hard otherwise. 2. We chart the possibilities and limits of effective polynomial-time preprocessing (also known as kernelization). 3. We extend Halldórsson and Karlsson (2006)’s fixed-parameter algorithm for Independent Set on strip graphs parameterized by the structural parameter “maximum number of live jobs” to show that the problem (also known as Job Interval Selection) is fixed-parameter tractable with respect to the parameter k and generalize their algorithm from strip graphs to 2-union graphs. Preliminary experiments with random data indicate that Job Interval Selection with up to 15 jobs and 5⋅10^5 intervals can be solved optimally in less than 5 min.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Myhill-Nerode Methods for Hypergraphs.\n \n \n \n \n\n\n \n van Bevern, R.; Downey, R. G.; Fellows, M. R.; Gaspers, S.; and Rosamond, F. A.\n\n\n \n\n\n\n Algorithmica, 73(4): 696-729. 2015.\n \n\n\n\n
\n\n\n\n \n \n \"Myhill-Nerode preprint\n  \n \n \n \"Myhill-Nerode slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@article{BDF+15,\n  title =\t {Myhill-Nerode Methods for Hypergraphs},\n  author =\t {René van Bevern and Rodney G. Downey and Michael\n                  R. Fellows and Serge Gaspers and Frances\n                  A. Rosamond},\n  url_Preprint = {http://arxiv.org/abs/1211.1299},\n  doi =\t\t {10.1007/s00453-015-9977-x},\n  url_Slides =\n                  {http://rvb.su/pdf/myhill-nerode-hypergraphs-isaac13.pdf},\n  issn =\t {0178-4617},\n  year =\t 2015,\n  date =\t {2015-02-27},\n  journal =\t {Algorithmica},\n  volume =\t 73,\n  number =\t 4,\n  pages =\t {696-729},\n  publisher =\t {Springer},\n  abstract =\t {We give an analog of the Myhill–Nerode theorem from\n                  formal language theory for hypergraphs and use it to\n                  derive the following results for two NP-hard\n                  hypergraph problems. (1) We provide an algorithm for\n                  testing whether a hypergraph has cutwidth at most k\n                  that runs in linear time for constant k. In terms of\n                  parameterized complexity theory, the problem is\n                  fixed-parameter linear parameterized by k. (2) We\n                  show that it is not expressible in monadic\n                  second-order logic whether a hypergraph has bounded\n                  (fractional, generalized) hypertree width. The proof\n                  leads us to conjecture that, in terms of\n                  parameterized complexity theory, these problems are\n                  W[1]-hard parameterized by the incidence treewidth\n                  (the treewidth of the incidence graph). Thus, in the\n                  form of the Myhill–Nerode theorem for hypergraphs,\n                  we obtain a method to derive linear-time algorithms\n                  and to obtain indicators for intractability for\n                  hypergraph problems parameterized by incidence\n                  treewidth.},\n  keywords =\t {hypergraphs, invited}\n}\n\n
\n
\n\n\n
\n We give an analog of the Myhill–Nerode theorem from formal language theory for hypergraphs and use it to derive the following results for two NP-hard hypergraph problems. (1) We provide an algorithm for testing whether a hypergraph has cutwidth at most k that runs in linear time for constant k. In terms of parameterized complexity theory, the problem is fixed-parameter linear parameterized by k. (2) We show that it is not expressible in monadic second-order logic whether a hypergraph has bounded (fractional, generalized) hypertree width. The proof leads us to conjecture that, in terms of parameterized complexity theory, these problems are W[1]-hard parameterized by the incidence treewidth (the treewidth of the incidence graph). Thus, in the form of the Myhill–Nerode theorem for hypergraphs, we obtain a method to derive linear-time algorithms and to obtain indicators for intractability for hypergraph problems parameterized by incidence treewidth.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Network-Based Vertex Dissolution.\n \n \n \n \n\n\n \n van Bevern, R.; Bredereck, R.; Chen, J.; Froese, V.; Niedermeier, R.; and Woeginger, G. J.\n\n\n \n\n\n\n SIAM Journal on Discrete Mathematics, 29(2): 888-914. 2015.\n \n\n\n\n
\n\n\n\n \n \n \"Network-Based preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BBC+15,\n  title =\t {Network-Based Vertex Dissolution},\n  author =\t {René van Bevern and Robert Bredereck and Jiehua Chen\n                  and Vincent Froese and Rolf Niedermeier and Gerhard\n                  J. Woeginger},\n  url_Preprint = {http://arxiv.org/abs/1402.2664},\n  doi =\t\t {10.1137/140978880},\n  year =\t 2015,\n  date =\t {2015-05-01},\n  journal =\t {SIAM Journal on Discrete Mathematics},\n  volume =\t 29,\n  number =\t 2,\n  pages =\t {888-914},\n  abstract =\t {We introduce a graph-theoretic vertex dissolution\n                  model that applies to a number of redistribution\n                  scenarios, such as gerrymandering in political\n                  districting or work balancing in an online\n                  situation. The central aspect of our model is the\n                  deletion of certain vertices and the redistribution\n                  of their load to neighboring vertices in a\n                  completely balanced way. We investigate how the\n                  underlying graph structure, the knowledge of which\n                  vertices should be deleted, and the relation between\n                  old and new vertex loads influence the computational\n                  complexity of the underlying graph problems. Our\n                  results establish a clear borderline between\n                  tractable and intractable cases.},\n  keywords =\t {network analysis}\n}\n\n
\n
\n\n\n
\n We introduce a graph-theoretic vertex dissolution model that applies to a number of redistribution scenarios, such as gerrymandering in political districting or work balancing in an online situation. The central aspect of our model is the deletion of certain vertices and the redistribution of their load to neighboring vertices in a completely balanced way. We investigate how the underlying graph structure, the knowledge of which vertices should be deleted, and the relation between old and new vertex loads influence the computational complexity of the underlying graph problems. Our results establish a clear borderline between tractable and intractable cases.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n On the Parameterized Complexity of Computing Balanced Partitions in Graphs.\n \n \n \n \n\n\n \n van Bevern, R.; Feldmann, A. E.; Sorge, M.; and Suchý, O.\n\n\n \n\n\n\n Theory of Computing Systems, 57(1): 1-35. 2015.\n \n\n\n\n
\n\n\n\n \n \n \"On preprint\n  \n \n \n \"On slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BFSS15,\n  title =\t {On the Parameterized Complexity of Computing\n                  Balanced Partitions in Graphs},\n  author =\t {René van Bevern and Andreas Emil Feldmann and Manuel\n                  Sorge and Ondřej Suchý},\n  url_Preprint = {http://arxiv.org/abs/1312.7014},\n  doi =\t\t {10.1007/s00224-014-9557-5},\n  url_Slides =\t {http://rvb.su/pdf/bipartition.pdf},\n  issn =\t {1432-4350},\n  year =\t 2015,\n  date =\t {2015-07-08},\n  journal =\t {Theory of Computing Systems},\n  volume =\t 57,\n  number =\t 1,\n  pages =\t {1-35},\n  publisher =\t {Springer},\n  abstract =\t {A balanced partition is a clustering of a graph into\n                  a given number of equal-sized parts. For instance,\n                  the Bisection problem asks to remove at most k edges\n                  in order to partition the vertices into two\n                  equal-sized parts. We prove that Bisection is FPT\n                  for the distance to constant cliquewidth if we are\n                  given the deletion set. This implies FPT algorithms\n                  for some well-studied parameters such as cluster\n                  vertex deletion number and feedback vertex\n                  set. However, we show that Bisection does not admit\n                  polynomial-size kernels for these parameters. For\n                  the Vertex Bisection problem, vertices need to be\n                  removed in order to obtain two equal-sized parts. We\n                  show that this problem is FPT for the number of\n                  removed vertices k if the solution cuts the graph\n                  into a constant number c of connected\n                  components. The latter condition is unavoidable,\n                  since we also prove that Vertex Bisection is\n                  W[1]-hard w.r.t. (k,c). 
Our algorithms for finding\n                  bisections can easily be adapted to finding\n                  partitions into d equal-sized parts, which entails\n                  additional running time factors of n^O(d). We show\n                  that a substantial speed-up is unlikely since the\n                  corresponding task is W[1]-hard w.r.t. d, even on\n                  forests of maximum degree two. We can, however, show\n                  that it is FPT for the vertex cover number.},\n  keywords =\t {graph modification}\n}\n\n\n
\n
\n\n\n
\n A balanced partition is a clustering of a graph into a given number of equal-sized parts. For instance, the Bisection problem asks to remove at most k edges in order to partition the vertices into two equal-sized parts. We prove that Bisection is FPT for the distance to constant cliquewidth if we are given the deletion set. This implies FPT algorithms for some well-studied parameters such as cluster vertex deletion number and feedback vertex set. However, we show that Bisection does not admit polynomial-size kernels for these parameters. For the Vertex Bisection problem, vertices need to be removed in order to obtain two equal-sized parts. We show that this problem is FPT for the number of removed vertices k if the solution cuts the graph into a constant number c of connected components. The latter condition is unavoidable, since we also prove that Vertex Bisection is W[1]-hard w.r.t. (k,c). Our algorithms for finding bisections can easily be adapted to finding partitions into d equal-sized parts, which entails additional running time factors of n^O(d). We show that a substantial speed-up is unlikely since the corresponding task is W[1]-hard w.r.t. d, even on forests of maximum degree two. We can, however, show that it is FPT for the vertex cover number.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2014\n \n \n (4)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Constant-factor approximations for Capacitated Arc Routing without triangle inequality.\n \n \n \n \n\n\n \n van Bevern, R.; Hartung, S.; Nichterlein, A.; and Sorge, M.\n\n\n \n\n\n\n Operations Research Letters, 42(4): 290-292. 2014.\n \n\n\n\n
\n\n\n\n \n \n \"Constant-factor preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@article{BHNS14,\n  title =\t {Constant-factor approximations for Capacitated Arc\n                  Routing without triangle inequality},\n  author =\t {René van Bevern and Sepp Hartung and André\n                  Nichterlein and Manuel Sorge},\n  url_Preprint = {http://arxiv.org/abs/1404.3660},\n  doi =\t\t {10.1016/j.orl.2014.05.002},\n  issn =\t {0167-6377},\n  year =\t 2014,\n  date =\t {2014-06-01},\n  journal =\t {Operations Research Letters},\n  volume =\t 42,\n  number =\t 4,\n  pages =\t {290-292},\n  abstract =\t {Abstract Given an undirected graph with edge costs\n                  and edge demands, the Capacitated Arc Routing\n                  problem (CARP) asks for minimum-cost routes for\n                  equal-capacity vehicles so as to satisfy all\n                  demands. Constant-factor polynomial-time\n                  approximation algorithms were proposed for CARP with\n                  triangle inequality, while CARP was claimed to be\n                  NP-hard to approximate within any constant factor in\n                  general. Correcting this claim, we show that any\n                  factor α approximation for CARP with triangle\n                  inequality yields a factor α approximation for the\n                  general CARP.},\n  keywords =\t {routing, scheduling}\n}\n\n
\n
\n\n\n
\n Abstract Given an undirected graph with edge costs and edge demands, the Capacitated Arc Routing problem (CARP) asks for minimum-cost routes for equal-capacity vehicles so as to satisfy all demands. Constant-factor polynomial-time approximation algorithms were proposed for CARP with triangle inequality, while CARP was claimed to be NP-hard to approximate within any constant factor in general. Correcting this claim, we show that any factor α approximation for CARP with triangle inequality yields a factor α approximation for the general CARP.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Network-Based Dissolution.\n \n \n \n\n\n \n van Bevern, R.; Bredereck, R.; Chen, J.; Froese, V.; Niedermeier, R.; and Woeginger, G. J.\n\n\n \n\n\n\n In Csuhaj-Varjú, E.; Dietzfelbinger, M.; and Ésik, Z., editor(s), MFCS 2014, volume 8635, of Lecture Notes in Computer Science, pages 69-80. Springer, 2014.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BBC+14b,\n  title =\t {Network-Based Dissolution},\n  doi =\t\t {10.1007/978-3-662-44465-8_7},\n  author =\t {René van Bevern and Robert Bredereck and Jiehua Chen\n                  and Vincent Froese and Rolf Niedermeier and Gerhard\n                  J. Woeginger},\n  isbn =\t {978-3-662-44464-1},\n  year =\t {2014},\n  date =\t {2014-08-25},\n  booktitle =\t {MFCS 2014},\n  editor =\t {Erzsébet Csuhaj-Varjú and Martin Dietzfelbinger and Zoltán Ésik},\n  volume =\t {8635},\n  pages =\t {69-80},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {We introduce a graph-theoretic dissolution model\n                  that applies to a number of redistribution scenarios\n                  such as gerrymandering in political districting or\n                  work balancing in an online situation. The central\n                  aspect of our model is the deletion of certain\n                  vertices and the redistribution of their loads to\n                  neighboring vertices in a perfectly balanced way. We\n                  investigate how the underlying graph structure, the\n                  pre-knowledge of which vertices should be deleted,\n                  and the relation between old and new vertex loads\n                  influence the computational complexity of the\n                  underlying graph problems. Our results establish a\n                  clear borderline between tractable and intractable\n                  cases.},\n  keywords =\t {network analysis}\n}\n\n
\n
\n\n\n
\n We introduce a graph-theoretic dissolution model that applies to a number of redistribution scenarios such as gerrymandering in political districting or work balancing in an online situation. The central aspect of our model is the deletion of certain vertices and the redistribution of their loads to neighboring vertices in a perfectly balanced way. We investigate how the underlying graph structure, the pre-knowledge of which vertices should be deleted, and the relation between old and new vertex loads influence the computational complexity of the underlying graph problems. Our results establish a clear borderline between tractable and intractable cases.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Star Partitions of Perfect Graphs.\n \n \n \n\n\n \n van Bevern, R.; Bredereck, R.; Bulteau, L.; Chen, J.; Froese, V.; Niedermeier, R.; and Woeginger, G. J.\n\n\n \n\n\n\n In Esparza, J.; Fraigniaud, P.; Husfeldt, T.; and Koutsoupias, E., editor(s), ICALP 2014, volume 8572, of Lecture Notes in Computer Science, pages 174-185. Springer, 2014.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BBB+14,\n  title =\t {Star Partitions of Perfect Graphs},\n  doi =\t\t {10.1007/978-3-662-43948-7_15},\n  author =\t {René van Bevern and Robert Bredereck and Laurent\n                  Bulteau and Jiehua Chen and Vincent Froese and Rolf\n                  Niedermeier and Gerhard J. Woeginger},\n  isbn =\t {978-3-662-43947-0},\n  year =\t {2014},\n  date =\t {2014-07-01},\n  booktitle =\t {ICALP 2014},\n  editor =\t {Javier Esparza and Pierre Fraigniaud and Thore Husfeldt and Elias Koutsoupias},\n  volume =\t {8572},\n  pages =\t {174-185},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {The partition of graphs into nice subgraphs is a\n                  central algorithmic problem with strong ties to\n                  matching theory. We study the partitioning of\n                  undirected graphs into stars, a problem known to be\n                  NP-complete even for the case of stars on three\n                  vertices. We perform a thorough computational\n                  complexity study of the problem on subclasses of\n                  perfect graphs and identify several polynomial-time\n                  solvable cases, for example, on interval graphs and\n                  bipartite permutation graphs, and also NP-hard\n                  cases, for example, on grid graphs and chordal\n                  graphs.},\n  keywords =\t {algorithmic graph theory}\n}\n\n
\n
\n\n\n
\n The partition of graphs into nice subgraphs is a central algorithmic problem with strong ties to matching theory. We study the partitioning of undirected graphs into stars, a problem known to be NP-complete even for the case of stars on three vertices. We perform a thorough computational complexity study of the problem on subclasses of perfect graphs and identify several polynomial-time solvable cases, for example, on interval graphs and bipartite permutation graphs, and also NP-hard cases, for example, on grid graphs and chordal graphs.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Towards Optimal and Expressive Kernelization for $d$-Hitting Set.\n \n \n \n \n\n\n \n van Bevern, R.\n\n\n \n\n\n\n Algorithmica, 70(1): 129-147. 2014.\n \n\n\n\n
\n\n\n\n \n \n \"Towards preprint\n  \n \n \n \"Towards slides\n  \n \n \n \"Towards code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@article{Bev14b,\n  title =\t {Towards Optimal and Expressive Kernelization for\n                  $d$-Hitting Set},\n  author =\t {René van Bevern},\n  url_Preprint = {http://arxiv.org/abs/1112.2310v3},\n  doi =\t\t {10.1007/s00453-013-9774-3},\n  url_Slides =\n                  {http://rvb.su/pdf/towards-optimal-and-expressive-kernelization-for-hitting-set-63tt.pdf},\n  url_Code =\t {http://fpt.akt.tu-berlin.de/hslinkern/},\n  issn =\t {0178-4617},\n  year =\t 2014,\n  date =\t {2014-09-01},\n  journal =\t {Algorithmica},\n  volume =\t 70,\n  number =\t 1,\n  pages =\t {129-147},\n  publisher =\t {Springer},\n  abstract =\t {A sunflower in a hypergraph is a set of hyperedges\n                  pairwise intersecting in exactly the same vertex\n                  set. Sunflowers are a useful tool in polynomial-time\n                  data reduction for problems formalizable as\n                  d-Hitting Set, the problem of covering all\n                  hyperedges (whose cardinality is bounded from above\n                  by a constant d) of a hypergraph by at most k\n                  vertices. Additionally, in fault diagnosis,\n                  sunflowers yield concise explanations for “highly\n                  defective structures”. We provide a linear-time\n                  algorithm that, by finding sunflowers, transforms an\n                  instance of d-Hitting Set into an equivalent\n                  instance comprising at most O(k^d) hyperedges and\n                  vertices. In terms of parameterized complexity, we\n                  show a problem kernel with asymptotically optimal\n                  size (unless coNP⊆NP/poly) and provide experimental\n                  results that show the practical applicability of our\n                  algorithm. 
Finally, we show that the number of\n                  vertices can be reduced to O(k^(d−1)) with\n                  additional processing in O(k^(1.5d))\n                  time—nontrivially combining the sunflower technique\n                  with problem kernels due to Abu-Khzam and Moser.},\n  keywords =\t {hypergraphs, invited}\n}\n\n
\n
\n\n\n
\n A sunflower in a hypergraph is a set of hyperedges pairwise intersecting in exactly the same vertex set. Sunflowers are a useful tool in polynomial-time data reduction for problems formalizable as d-Hitting Set, the problem of covering all hyperedges (whose cardinality is bounded from above by a constant d) of a hypergraph by at most k vertices. Additionally, in fault diagnosis, sunflowers yield concise explanations for “highly defective structures”. We provide a linear-time algorithm that, by finding sunflowers, transforms an instance of d-Hitting Set into an equivalent instance comprising at most O(k^d) hyperedges and vertices. In terms of parameterized complexity, we show a problem kernel with asymptotically optimal size (unless coNP⊆NP/poly) and provide experimental results that show the practical applicability of our algorithm. Finally, we show that the number of vertices can be reduced to O(k^(d−1)) with additional processing in O(k^(1.5d)) time—nontrivially combining the sunflower technique with problem kernels due to Abu-Khzam and Moser.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2013\n \n \n (4)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n A Parameterized Complexity Analysis of Combinatorial Feature Selection Problems.\n \n \n \n\n\n \n Froese, V.; van Bevern, R.; Niedermeier, R.; and Sorge, M.\n\n\n \n\n\n\n In Chatterjee, K.; and Sgall, J., editor(s), MFCS 2013, volume 8087, of Lecture Notes in Computer Science, pages 445-456. Springer, 2013.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n\n\n\n
\n
@incollection{FBNS13,\n  title =\t {A Parameterized Complexity Analysis of Combinatorial\n                  Feature Selection Problems},\n  doi =\t\t {10.1007/978-3-642-40313-2_40},\n  author =\t {Vincent Froese and René van Bevern and Rolf\n                  Niedermeier and Manuel Sorge},\n  isbn =\t {978-3-642-40312-5},\n  year =\t {2013},\n  date =\t {2013-08-26},\n  booktitle =\t {MFCS 2013},\n  editor =\t {Krishnendu Chatterjee and Jirí Sgall},\n  volume =\t {8087},\n  pages =\t {445-456},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {We examine the algorithmic tractability of NP-hard\n                  combinatorial feature selection problems in terms of\n                  parameterized complexity theory. In combinatorial\n                  feature selection, one seeks to discard dimensions\n                  from high-dimensional data such that the resulting\n                  instances fulfill a desired property. In\n                  parameterized complexity analysis, one seeks to\n                  identify relevant problem-specific quantities and\n                  tries to determine their influence on the\n                  computational complexity of the considered\n                  problem. In this paper, for various combinatorial\n                  feature selection problems, we identify\n                  parameterizations and reveal to what extent these\n                  govern computational complexity. We provide\n                  tractability as well as intractability results; for\n                  example, we show that the Distinct Vectors problem\n                  on binary points is polynomial-time solvable if each\n                  pair of points differs in at most three dimensions,\n                  whereas it is NP-hard otherwise.},\n}\n\n
\n
\n\n\n
\n We examine the algorithmic tractability of NP-hard combinatorial feature selection problems in terms of parameterized complexity theory. In combinatorial feature selection, one seeks to discard dimensions from high-dimensional data such that the resulting instances fulfill a desired property. In parameterized complexity analysis, one seeks to identify relevant problem-specific quantities and tries to determine their influence on the computational complexity of the considered problem. In this paper, for various combinatorial feature selection problems, we identify parameterizations and reveal to what extent these govern computational complexity. We provide tractability as well as intractability results; for example, we show that the Distinct Vectors problem on binary points is polynomial-time solvable if each pair of points differs in at most three dimensions, whereas it is NP-hard otherwise.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Myhill-Nerode Methods for Hypergraphs.\n \n \n \n \n\n\n \n van Bevern, R.; Fellows, M. R.; Gaspers, S.; and Rosamond, F. A.\n\n\n \n\n\n\n In Cai, L.; Cheng, S.; and Lam, T., editor(s), ISAAC 2013, volume 8283, of Lecture Notes in Computer Science, pages 372-382. Springer, 2013.\n \n\n\n\n
\n\n\n\n \n \n \"Myhill-Nerode slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BFGR13,\n  title =\t {Myhill-Nerode Methods for Hypergraphs},\n  doi =\t\t {10.1007/978-3-642-45030-3_35},\n  author =\t {René van Bevern and Michael R. Fellows and Serge\n                  Gaspers and Frances A. Rosamond},\n  url_Slides =\n                  {http://rvb.su/pdf/myhill-nerode-hypergraphs-isaac13.pdf},\n  isbn =\t {978-3-642-45029-7},\n  year =\t {2013},\n  date =\t {2013-12-13},\n  booktitle =\t {ISAAC 2013},\n  editor =\t {Leizhen Cai and Siu-Wing Cheng and Tak-Wah Lam},\n  volume =\t {8283},\n  pages =\t {372-382},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {We introduce a method of applying Myhill-Nerode\n                  methods from formal language theory to hypergraphs\n                  and show how this method can be used to obtain the\n                  following parameterized complexity\n                  results. Hypergraph Cutwidth (deciding whether a\n                  hypergraph on n vertices has cutwidth at most k) is\n                  linear-time solvable for constant k. For hypergraphs\n                  of constant incidence treewidth (treewidth of the\n                  incidence graph), Hypertree Width and variants\n                  cannot be solved by simple finite tree automata. The\n                  proof leads us to conjecture that Hypertree Width is\n                  W[1]-hard for this parameter.},\n  keywords =\t {hypergraphs}\n}\n\n
\n
\n\n\n
\n We introduce a method of applying Myhill-Nerode methods from formal language theory to hypergraphs and show how this method can be used to obtain the following parameterized complexity results. Hypergraph Cutwidth (deciding whether a hypergraph on n vertices has cutwidth at most k) is linear-time solvable for constant k. For hypergraphs of constant incidence treewidth (treewidth of the incidence graph), Hypertree Width and variants cannot be solved by simple finite tree automata. The proof leads us to conjecture that Hypertree Width is W[1]-hard for this parameter.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n On the Parameterized Complexity of Computing Graph Bisections.\n \n \n \n \n\n\n \n van Bevern, R.; Feldmann, A. E.; Sorge, M.; and Suchý, O.\n\n\n \n\n\n\n In Brandstädt, A.; Jansen, K.; and Reischuk, R., editor(s), WG 2013, volume 8165, of Lecture Notes in Computer Science, pages 76-87. Springer, 2013.\n \n\n\n\n
\n\n\n\n \n \n \"On slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BFSS13,\n  title =\t {On the Parameterized Complexity of Computing Graph\n                  Bisections},\n  doi =\t\t {10.1007/978-3-642-45043-3_8},\n  author =\t {René van Bevern and Andreas Emil Feldmann and Manuel\n                  Sorge and Ondřej Suchý},\n  isbn =\t {978-3-642-45042-6},\n  year =\t {2013},\n  date =\t {2013-06-19},\n  booktitle =\t {WG 2013},\n  editor =\t {Andreas Brandstädt and Klaus Jansen and Rüdiger Reischuk},\n  volume =\t {8165},\n  pages =\t {76-87},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {The Bisection problem asks for a partition of the\n                  vertices of a graph into two equally sized sets,\n                  while minimizing the cut size. This is the number of\n                  edges connecting the two vertex sets. Bisection has\n                  been thoroughly studied in the past. However, only\n                  few results have been published that consider the\n                  parameterized complexity of this problem. We show\n                  that Bisection is FPT w.r.t. the minimum cut size if\n                  there is an optimum bisection that cuts into a given\n                  constant number of connected components. Our\n                  algorithm applies to the more general Balanced\n                  Biseparator problem where vertices need to be\n                  removed instead of edges. We prove that this problem\n                  is W[1]-hard w.r.t. the minimum cut size and the\n                  number of cut out components. For Bisection we\n                  further show that no polynomial-size kernels exist\n                  for the cut size parameter. In fact, we show this\n                  for all parameters that are polynomial in the input\n                  size and that do not increase when taking disjoint\n                  unions of graphs. 
We prove fixed-parameter\n                  tractability for the distance to constant\n                  cliquewidth if we are given the deletion set. This\n                  implies fixed-parameter algorithms for some\n                  well-studied parameters such as cluster vertex\n                  deletion number and feedback vertex set.},\n  url_Slides =\t {http://rvb.su/pdf/bipartition.pdf},\n  keywords =\t {graph modification}\n}\n\n
\n
\n\n\n
\n The Bisection problem asks for a partition of the vertices of a graph into two equally sized sets, while minimizing the cut size. This is the number of edges connecting the two vertex sets. Bisection has been thoroughly studied in the past. However, only few results have been published that consider the parameterized complexity of this problem. We show that Bisection is FPT w.r.t. the minimum cut size if there is an optimum bisection that cuts into a given constant number of connected components. Our algorithm applies to the more general Balanced Biseparator problem where vertices need to be removed instead of edges. We prove that this problem is W[1]-hard w.r.t. the minimum cut size and the number of cut out components. For Bisection we further show that no polynomial-size kernels exist for the cut size parameter. In fact, we show this for all parameters that are polynomial in the input size and that do not increase when taking disjoint unions of graphs. We prove fixed-parameter tractability for the distance to constant cliquewidth if we are given the deletion set. This implies fixed-parameter algorithms for some well-studied parameters such as cluster vertex deletion number and feedback vertex set.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Parameterized Complexity of DAG Partitioning.\n \n \n \n\n\n \n van Bevern, R.; Bredereck, R.; Chopin, M.; Hartung, S.; Hüffner, F.; Nichterlein, A.; and Suchý, O.\n\n\n \n\n\n\n In Spirakis, P. G.; and Serna, M., editor(s), CIAC 2013, volume 7878, of Lecture Notes in Computer Science, pages 49-60. Springer, 2013.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BBC+13,\n  title =\t {Parameterized Complexity of {DAG} Partitioning},\n  doi =\t\t {10.1007/978-3-642-38233-8_5},\n  author =\t {René van Bevern and Robert Bredereck and Morgan\n                  Chopin and Sepp Hartung and Falk Hüffner and André\n                  Nichterlein and Ondřej Suchý},\n  isbn =\t {978-3-642-38232-1},\n  year =\t {2013},\n  date =\t {2013-05-22},\n  booktitle =\t {CIAC 2013},\n  editor =\t {Paul G. Spirakis and Maria Serna},\n  volume =\t {7878},\n  pages =\t {49-60},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {The goal of tracking the origin of short,\n                  distinctive phrases (memes) that propagate through\n                  the web in reaction to current events has been\n                  formalized as DAG Partitioning: given a directed\n                  acyclic graph, delete edges of minimum weight such\n                  that each resulting connected component of the\n                  underlying undirected graph contains only one\n                  sink. Motivated by NP-hardness and hardness of\n                  approximation results, we consider the parameterized\n                  complexity of this problem. We show that it can be\n                  solved in O(2^k·n^2) time, where k is the number of\n                  edge deletions, proving fixed-parameter tractability\n                  for parameter k. We then show that unless the\n                  Exponential Time Hypothesis (ETH) fails, this cannot\n                  be improved to 2^o(k)·n^O(1); further, DAG\n                  Partitioning does not have a polynomial kernel\n                  unless NP ⊆ coNP/poly. 
Finally, given a tree\n                  decomposition of width w, we show how to solve DAG\n                  Partitioning in 2^O(w^2)⋅n time, improving a known\n                  algorithm for the parameter pathwidth.},\n  keywords =\t {graph modification, network analysis},\n}\n\n
\n
\n\n\n
\n The goal of tracking the origin of short, distinctive phrases (memes) that propagate through the web in reaction to current events has been formalized as DAG Partitioning: given a directed acyclic graph, delete edges of minimum weight such that each resulting connected component of the underlying undirected graph contains only one sink. Motivated by NP-hardness and hardness of approximation results, we consider the parameterized complexity of this problem. We show that it can be solved in O(2^k·n^2) time, where k is the number of edge deletions, proving fixed-parameter tractability for parameter k. We then show that unless the Exponential Time Hypothesis (ETH) fails, this cannot be improved to 2^o(k)·n^O(1); further, DAG Partitioning does not have a polynomial kernel unless NP ⊆ coNP/poly. Finally, given a tree decomposition of width w, we show how to solve DAG Partitioning in 2^O(w^2)⋅n time, improving a known algorithm for the parameter pathwidth.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2012\n \n \n (5)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n A new view on Rural Postman based on Eulerian Extension and Matching.\n \n \n \n \n\n\n \n Sorge, M.; van Bevern, R.; Niedermeier, R.; and Weller, M.\n\n\n \n\n\n\n Journal of Discrete Algorithms, 16: 12–33. 2012.\n \n\n\n\n
\n\n\n\n \n \n \"A preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n \n \n\n\n\n
\n
@article{SBNW12,\n  title =\t {A new view on Rural Postman based on Eulerian\n                  Extension and Matching},\n  author =\t {Manuel Sorge and René van Bevern and Rolf Niedermeier\n                  and Mathias Weller},\n  url_Preprint =\n                  {http://fpt.akt.tu-berlin.de/publications/A_New_View_on_Rural_Postman_Based_on_Eulerian_Extension_and_Matching-JDA.pdf},\n  doi =\t\t {10.1016/j.jda.2012.04.007},\n  issn =\t {1570-8667},\n  year =\t 2012,\n  date =\t {2012-10-01},\n  journal =\t {Journal of Discrete Algorithms},\n  volume =\t 16,\n  pages =\t {12--33},\n  abstract =\t {We provide a new characterization of the NP-hard arc\n                  routing problem Rural Postman in terms of a\n                  constrained variant of minimum-weight perfect matching\n                  on bipartite graphs. To this end, we employ a\n                  parameterized equivalence between Rural Postman and\n                  Eulerian Extension, a natural arc addition problem in\n                  directed multigraphs. We indicate the NP-hardness of\n                  the introduced matching problem. In particular, we use\n                  the matching problem to make partial progress towards\n                  answering the open question about the parameterized\n                  complexity of Rural Postman with respect to the\n                  parameter “number of weakly connected components in\n                  the graph induced by the required arcs”. This is a\n                  more than thirty years open and long-neglected\n                  question with significant practical relevance.},\n  keywords =\t {graph modification, invited, routing}\n}\n\n
\n
\n\n\n
\n We provide a new characterization of the NP-hard arc routing problem Rural Postman in terms of a constrained variant of minimum-weight perfect matching on bipartite graphs. To this end, we employ a parameterized equivalence between Rural Postman and Eulerian Extension, a natural arc addition problem in directed multigraphs. We indicate the NP-hardness of the introduced matching problem. In particular, we use the matching problem to make partial progress towards answering the open question about the parameterized complexity of Rural Postman with respect to the parameter “number of weakly connected components in the graph induced by the required arcs”. This is a more than thirty years open and long-neglected question with significant practical relevance.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Approximation and Tidying—A Problem Kernel for $s$-Plex Cluster Vertex Deletion.\n \n \n \n \n\n\n \n van Bevern, R.; Moser, H.; and Niedermeier, R.\n\n\n \n\n\n\n Algorithmica, 62(3-4): 930-950. 2012.\n \n\n\n\n
\n\n\n\n \n \n \"Approximation preprint\n  \n \n \n \"Approximation slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BMN12,\n  title =\t {Approximation and Tidying---A Problem Kernel for\n                  $s$-Plex Cluster Vertex Deletion},\n  author =\t {René van Bevern and Hannes Moser and Rolf Niedermeier},\n  url_Preprint =\n                  {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/approximation-and-tidying.pdf},\n  url_Slides =\t {http://rvb.su/pdf/kernelization-through-tidying-talk-latin10.pdf},\n  doi =\t\t {10.1007/s00453-011-9492-7},\n  issn =\t {0178-4617},\n  year =\t 2012,\n  date =\t {2012-04-01},\n  journal =\t {Algorithmica},\n  volume =\t 62,\n  number =\t {3-4},\n  pages =\t {930-950},\n  publisher =\t {Springer},\n  abstract =\t {We introduce the NP-hard graph-based data clustering\n                  problem s-Plex Cluster Vertex Deletion, where the task\n                  is to delete at most k vertices from a graph so that\n                  the connected components of the resulting graph are\n                  s-plexes. In an s-plex, every vertex has an edge to\n                  all but at most s−1 other vertices; cliques are\n                  1-plexes. We propose a new method based on\n                  “approximation and tidying” for kernelizing vertex\n                  deletion problems whose goal graphs can be\n                  characterized by forbidden induced subgraphs. The\n                  method exploits polynomial-time approximation results\n                  and thus provides a useful link between approximation\n                  and kernelization. Employing “approximation and\n                  tidying”, we develop data reduction rules that, in\n                  O(ksn^2) time, transform an s-Plex Cluster Vertex\n                  Deletion instance with n vertices into an equivalent\n                  instance with O(k^2 s^3) vertices, yielding a problem\n                  kernel. 
To this end, we also show how to exploit\n                  structural properties of the specific problem in order\n                  to significantly improve the running time of the\n                  proposed kernelization method.},\n  keywords =\t {graph modification}\n}\n\n
\n
\n\n\n
\n We introduce the NP-hard graph-based data clustering problem s-Plex Cluster Vertex Deletion, where the task is to delete at most k vertices from a graph so that the connected components of the resulting graph are s-plexes. In an s-plex, every vertex has an edge to all but at most s−1 other vertices; cliques are 1-plexes. We propose a new method based on “approximation and tidying” for kernelizing vertex deletion problems whose goal graphs can be characterized by forbidden induced subgraphs. The method exploits polynomial-time approximation results and thus provides a useful link between approximation and kernelization. Employing “approximation and tidying”, we develop data reduction rules that, in O(ksn^2) time, transform an s-Plex Cluster Vertex Deletion instance with n vertices into an equivalent instance with O(k^2 s^3) vertices, yielding a problem kernel. To this end, we also show how to exploit structural properties of the specific problem in order to significantly improve the running time of the proposed kernelization method.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n Interval Scheduling and Colorful Independent Sets.\n \n \n \n\n\n \n van Bevern, R.; Mnich, M.; Niedermeier, R.; and Weller, M.\n\n\n \n\n\n\n In Chao, K.; Hsu, T.; and Lee, D., editor(s), ISAAC 2012, volume 7676, of Lecture Notes in Computer Science, pages 247–256. Springer, 2012.\n \n\n\n\n
\n\n\n\n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BMNW12,\n  title =\t {Interval Scheduling and Colorful Independent Sets},\n  doi =\t\t {10.1007/978-3-642-35261-4_28},\n  author =\t {René van Bevern and Matthias Mnich and Rolf\n                  Niedermeier and Mathias Weller},\n  isbn =\t {978-3-642-35260-7},\n  year =\t {2012},\n  date =\t {2012-12-19},\n  booktitle =\t {ISAAC 2012},\n  editor =\t {Kun-Mao Chao and Tsan-sheng Hsu and Der-Tsai Lee},\n  volume =\t {7676},\n  pages =\t {247--256},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {The NP-hard Independent Set problem is to determine\n                  for a given graph G and an integer k whether G\n                  contains a set of k pairwise non-adjacent\n                  vertices. The problem has numerous applications in\n                  scheduling, including resource allocation and steel\n                  manufacturing. There, one encounters restricted\n                  graph classes such as 2-union graphs, which are\n                  edge-wise unions of two interval graphs on the same\n                  vertex set, or strip graphs, where additionally one\n                  of the two interval graphs is a disjoint union of\n                  cliques. We prove NP-hardness of Independent Set on\n                  a very restricted subclass of 2-union graphs and\n                  identify natural parameterizations to chart the\n                  possibilities and limitations of effective\n                  polynomial-time preprocessing (kernelization) and\n                  fixed-parameter algorithms. Our algorithms benefit\n                  from novel formulations of the computational\n                  problems in terms of (list-)colored interval\n                  graphs.},\n  keywords =\t {scheduling}\n}\n\n
\n
\n\n\n
\n The NP-hard Independent Set problem is to determine for a given graph G and an integer k whether G contains a set of k pairwise non-adjacent vertices. The problem has numerous applications in scheduling, including resource allocation and steel manufacturing. There, one encounters restricted graph classes such as 2-union graphs, which are edge-wise unions of two interval graphs on the same vertex set, or strip graphs, where additionally one of the two interval graphs is a disjoint union of cliques. We prove NP-hardness of Independent Set on a very restricted subclass of 2-union graphs and identify natural parameterizations to chart the possibilities and limitations of effective polynomial-time preprocessing (kernelization) and fixed-parameter algorithms. Our algorithms benefit from novel formulations of the computational problems in terms of (list-)colored interval graphs.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Linear-Time Computation of a Linear Problem Kernel for Dominating Set on Planar Graphs.\n \n \n \n \n\n\n \n van Bevern, R.; Hartung, S.; Kammer, F.; Niedermeier, R.; and Weller, M.\n\n\n \n\n\n\n In Marx, D.; and Rossmanith, P., editor(s), IPEC 2011, volume 7112, of Lecture Notes in Computer Science, pages 194–206. Springer, 2012.\n \n\n\n\n
\n\n\n\n \n \n \"Linear-Time preprint\n  \n \n \n \"Linear-Time slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BHK+11,\n  title =\t {Linear-Time Computation of a Linear Problem Kernel\n                  for Dominating Set on Planar Graphs},\n  author =\t {René van Bevern and Sepp Hartung and Frank Kammer\n                  and Rolf Niedermeier and Mathias Weller},\n  url_Preprint =\n                  {http://fpt.akt.tu-berlin.de/publications/Linear-Time_Computation_of_a_Linear_Problem_Kernel_for_Dominating_set_on_Planar_Graphs-IPEC.pdf},\n  url_Slides =\n                  {http://rvb.su/pdf/linear-time-linear-kernel-dominating-set.pdf},\n  doi =\t\t {10.1007/978-3-642-28050-4_16},\n  isbn =\t {978-3-642-28049-8},\n  year =\t {2012},\n  date =\t {2012-09-06},\n  booktitle =\t {IPEC 2011},\n  editor =\t {Dániel Marx and Peter Rossmanith},\n  volume =\t {7112},\n  pages =\t {194--206},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {We present a linear-time kernelization algorithm\n                  that transforms a given planar graph G with\n                  domination number γ(G) into a planar graph G' of\n                  size O(γ(G)) with γ(G) = γ(G'). In addition, a\n                  minimum dominating set for G can be inferred from a\n                  minimum dominating set for G'. In terms of\n                  parameterized algorithmics, this implies a\n                  linear-size problem kernel for the NP-hard\n                  Dominating Set problem on planar graphs, where the\n                  kernelization takes linear time. This improves on\n                  previous kernelization algorithms that provide\n                  linear-size kernels in cubic time.},\n  keywords =\t {algorithmic graph theory}\n}\n\n
\n
\n\n\n
\n We present a linear-time kernelization algorithm that transforms a given planar graph G with domination number γ(G) into a planar graph G' of size O(γ(G)) with γ(G) = γ(G'). In addition, a minimum dominating set for G can be inferred from a minimum dominating set for G'. In terms of parameterized algorithmics, this implies a linear-size problem kernel for the NP-hard Dominating Set problem on planar graphs, where the kernelization takes linear time. This improves on previous kernelization algorithms that provide linear-size kernels in cubic time.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Towards Optimal and Expressive Kernelization for $d$-Hitting Set.\n \n \n \n \n\n\n \n van Bevern, R.\n\n\n \n\n\n\n In Gudmundsson, J.; Mestre, J.; and Viglas, T., editor(s), COCOON 2012, volume 7434, of Lecture Notes in Computer Science, pages 121-132. Springer, 2012.\n \n\n\n\n
\n\n\n\n \n \n \"Towards slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{Bev12,\n  title =\t {Towards Optimal and Expressive Kernelization for\n                  $d$-Hitting Set},\n  doi =\t\t {10.1007/978-3-642-32241-9_11},\n  author =\t {René van Bevern},\n  url_Slides =\n                  {http://rvb.su/pdf/towards-optimal-and-expressive-kernelization-for-hitting-set-63tt.pdf},\n  isbn =\t {978-3-642-32240-2},\n  year =\t {2012},\n  date =\t {2012-08-20},\n  booktitle =\t {COCOON 2012},\n  editor =\t {Joachim Gudmundsson and Julián Mestre and Taso Viglas},\n  volume =\t {7434},\n  pages =\t {121-132},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {A sunflower in a hypergraph is a set of hyperedges\n                  pairwise intersecting in exactly the same vertex\n                  set. Sunflowers are a useful tool in polynomial-time\n                  data reduction for problems formalizable as\n                  d-Hitting Set, the problem of covering all\n                  hyperedges (of cardinality at most d) of a\n                  hypergraph by at most k vertices. Additionally, in\n                  fault diagnosis, sunflowers yield concise\n                  explanations for “highly defective structures”. We\n                  provide a linear-time algorithm that, by finding\n                  sunflowers, transforms an instance of d-Hitting Set\n                  into an equivalent instance comprising at most\n                  O(k^d) hyperedges and vertices. In terms of\n                  parameterized complexity, we show a problem kernel\n                  with asymptotically optimal size (unless\n                  coNP ⊆ NP/poly). We show that the number of vertices\n                  can be reduced to O(k^(d−1)) with additional\n                  processing in O(k^(1.5d)) time—nontrivially\n                  combining the sunflower technique with problem\n                  kernels due to Abu-Khzam and Moser.},\n  keywords =\t {hypergraphs}\n}\n\n
\n
\n\n\n
\n A sunflower in a hypergraph is a set of hyperedges pairwise intersecting in exactly the same vertex set. Sunflowers are a useful tool in polynomial-time data reduction for problems formalizable as d-Hitting Set, the problem of covering all hyperedges (of cardinality at most d) of a hypergraph by at most k vertices. Additionally, in fault diagnosis, sunflowers yield concise explanations for “highly defective structures”. We provide a linear-time algorithm that, by finding sunflowers, transforms an instance of d-Hitting Set into an equivalent instance comprising at most O(k^d) hyperedges and vertices. In terms of parameterized complexity, we show a problem kernel with asymptotically optimal size (unless coNP ⊆ NP/poly). We show that the number of vertices can be reduced to O(k^(d−1)) with additional processing in O(k^(1.5d)) time—nontrivially combining the sunflower technique with problem kernels due to Abu-Khzam and Moser.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2011\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n From Few Components to an Eulerian Graph by Adding Arcs.\n \n \n \n \n\n\n \n Sorge, M.; van Bevern, R.; Niedermeier, R.; and Weller, M.\n\n\n \n\n\n\n In Kolman, P.; and Kratochvíl, J., editor(s), WG 2011, volume 6986, of Lecture Notes in Computer Science, pages 307–318. Springer, 2011.\n \n\n\n\n
\n\n\n\n \n \n \"From preprint\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n  \n \n 1 download\n \n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@incollection{SBNW11,\n  title =\t {From Few Components to an Eulerian Graph by Adding\n                  Arcs},\n  author =\t {Manuel Sorge and René van Bevern and Rolf\n                  Niedermeier and Mathias Weller},\n  url_Preprint =\n                  {http://fpt.akt.tu-berlin.de/publications/From_Few_Components_to_an_Eulerian_Graph_by_Adding_Arcs-WG.pdf},\n  doi =\t\t {10.1007/978-3-642-25870-1_28},\n  isbn =\t {978-3-642-25869-5},\n  year =\t {2011},\n  date =\t {2011-06-21},\n  booktitle =\t {WG 2011},\n  editor =\t {Petr Kolman and Jan Kratochvíl},\n  volume =\t {6986},\n  pages =\t {307--318},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {Eulerian Extension (EE) is the problem to make an\n                  arc-weighted directed multigraph Eulerian by adding\n                  arcs of minimum total cost. EE is NP-hard and has\n                  been shown fixed-parameter tractable with respect to\n                  the number of arc additions. Complementing this\n                  result, on the way to answering an open question, we\n                  show that EE is fixed-parameter tractable with\n                  respect to the combined parameter “number of\n                  connected components in the underlying undirected\n                  multigraph” and “sum of indeg(v) - outdeg(v) over\n                  all vertices v in the input multigraph where this\n                  value is positive.” Moreover, we show that EE is\n                  unlikely to admit a polynomial-size problem kernel\n                  for this parameter combination and for the parameter\n                  “number of arc additions”.},\n  keywords =\t {graph modification, routing}\n}\n\n
\n
\n\n\n
\n Eulerian Extension (EE) is the problem to make an arc-weighted directed multigraph Eulerian by adding arcs of minimum total cost. EE is NP-hard and has been shown fixed-parameter tractable with respect to the number of arc additions. Complementing this result, on the way to answering an open question, we show that EE is fixed-parameter tractable with respect to the combined parameter “number of connected components in the underlying undirected multigraph” and “sum of indeg(v) - outdeg(v) over all vertices v in the input multigraph where this value is positive.” Moreover, we show that EE is unlikely to admit a polynomial-size problem kernel for this parameter combination and for the parameter “number of arc additions”.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Parameterized Algorithmics for Finding Connected Motifs in Biological Networks.\n \n \n \n \n\n\n \n Betzler, N.; van Bevern, R.; Fellows, M. R.; Komusiewicz, C.; and Niedermeier, R.\n\n\n \n\n\n\n IEEE/ACM Transactions on Computational Biology and Bioinformatics, 8(5): 1296-1308. 2011.\n \n\n\n\n
\n\n\n\n \n \n \"Parameterized preprint\n  \n \n \n \"Parameterized code\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@article{BBF+11,\n  title =\t {Parameterized Algorithmics for Finding Connected\n                  Motifs in Biological Networks},\n  author =\t {Nadja Betzler and René van Bevern and Michael\n                  R. Fellows and Christian Komusiewicz and Rolf\n                  Niedermeier},\n  url_Preprint =\n                  {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/finding-connected-motifs-in-biological-networks.pdf},\n  url_Code =\t {http://fpt.akt.tu-berlin.de/graph-motif/},\n  doi =\t\t {10.1109/TCBB.2011.19},\n  issn =\t {1545-5963},\n  year =\t 2011,\n  date =\t {2011-09-01},\n  journal =\t {IEEE/ACM Transactions on Computational Biology and\n                  Bioinformatics},\n  volume =\t 8,\n  number =\t 5,\n  pages =\t {1296-1308},\n  abstract =\t {We study the NP-hard LIST-COLORED GRAPH MOTIF problem\n                  which, given an undirected list-colored graph G = (V,\n                  E) and a multiset M of colors, asks for\n                  maximum-cardinality sets S ⊆ V and M' ⊆ M such that\n                  G[S] is connected and contains exactly (with respect\n                  to multiplicity) the colors in M'. LIST-COLORED GRAPH\n                  MOTIF has applications in the analysis of biological\n                  networks. We study LIST-COLORED GRAPH MOTIF with\n                  respect to three different parameterizations. For the\n                  parameters motif size |M| and solution size |S|, we\n                  present fixed-parameter algorithms, whereas for the\n                  parameter |V| - |M|, we show W[1]-hardness for general\n                  instances and achieve fixed-parameter tractability for\n                  a special case of LIST-COLORED GRAPH MOTIF. 
We\n                  implemented the fixed-parameter algorithms for\n                  parameters |M| and |S|, developed further speed-up\n                  heuristics for these algorithms, and applied them in\n                  the context of querying protein-interaction networks,\n                  demonstrating their usefulness for realistic\n                  instances. Furthermore, we show that extending the\n                  request for motif connectedness to stronger demands,\n                  such as biconnectedness or bridge-connectedness leads\n                  to W[1]-hard problems when the parameter is the motif\n                  size |M|.},\n  keywords =\t {network analysis}\n}\n\n
\n
\n\n\n
\n We study the NP-hard LIST-COLORED GRAPH MOTIF problem which, given an undirected list-colored graph G = (V, E) and a multiset M of colors, asks for maximum-cardinality sets S ⊆ V and M' ⊆ M such that G[S] is connected and contains exactly (with respect to multiplicity) the colors in M'. LIST-COLORED GRAPH MOTIF has applications in the analysis of biological networks. We study LIST-COLORED GRAPH MOTIF with respect to three different parameterizations. For the parameters motif size |M| and solution size |S|, we present fixed-parameter algorithms, whereas for the parameter |V| - |M|, we show W[1]-hardness for general instances and achieve fixed-parameter tractability for a special case of LIST-COLORED GRAPH MOTIF. We implemented the fixed-parameter algorithms for parameters |M| and |S|, developed further speed-up heuristics for these algorithms, and applied them in the context of querying protein-interaction networks, demonstrating their usefulness for realistic instances. Furthermore, we show that extending the request for motif connectedness to stronger demands, such as biconnectedness or bridge-connectedness leads to W[1]-hard problems when the parameter is the motif size |M|.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n
\n
\n  \n 2010\n \n \n (2)\n \n \n
\n
\n \n \n
\n \n\n \n \n \n \n \n \n Kernelization through Tidying—A Case Study Based on $s$-Plex Cluster Vertex Deletion.\n \n \n \n \n\n\n \n van Bevern, R.; Moser, H.; and Niedermeier, R.\n\n\n \n\n\n\n In López-Ortiz, A., editor(s), LATIN 2010, volume 6034, of Lecture Notes in Computer Science, pages 527-538. Springer, 2010.\n \n\n\n\n
\n\n\n\n \n \n \"Kernelization slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n\n\n\n
\n
@incollection{BMN10,\n  title =\t {Kernelization through Tidying---A Case Study Based\n                  on $s$-Plex Cluster Vertex Deletion},\n  author =\t {René van Bevern and Hannes Moser and Rolf\n                  Niedermeier},\n  isbn =\t {978-3-642-12199-9},\n  url_Slides =\n                  {http://rvb.su/pdf/kernelization-through-tidying-talk-latin10.pdf},\n  year =\t {2010},\n  date =\t {2010-04-19},\n  booktitle =\t {LATIN 2010},\n  editor =\t {Alejandro López-Ortiz},\n  volume =\t {6034},\n  pages =\t {527-538},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {We introduce the NP-hard graph-based data clustering\n                  problem s-Plex Cluster Vertex Deletion, where the\n                  task is to delete at most k vertices from a graph so\n                  that the connected components of the resulting graph\n                  are s-plexes. In an s-plex, every vertex has an edge\n                  to all but at most s−1 other vertices; cliques are\n                  1-plexes. We propose a new method for kernelizing a\n                  large class of vertex deletion problems and\n                  illustrate it by developing an O(k^2 s^3)-vertex\n                  problem kernel for s-Plex Cluster Vertex Deletion\n                  that can be computed in O(ksn^2) time, where n is\n                  the number of graph vertices. The corresponding\n                  “kernelization through tidying” exploits\n                  polynomial-time approximation results.},\n  doi =\t\t {10.1007/978-3-642-12200-2_46},\n  keywords =\t {graph modification}\n}\n\n\n
\n
\n\n\n
\n We introduce the NP-hard graph-based data clustering problem s-Plex Cluster Vertex Deletion, where the task is to delete at most k vertices from a graph so that the connected components of the resulting graph are s-plexes. In an s-plex, every vertex has an edge to all but at most s−1 other vertices; cliques are 1-plexes. We propose a new method for kernelizing a large class of vertex deletion problems and illustrate it by developing an O(k^2 s^3)-vertex problem kernel for s-Plex Cluster Vertex Deletion that can be computed in O(ksn^2) time, where n is the number of graph vertices. The corresponding “kernelization through tidying” exploits polynomial-time approximation results.\n
\n\n\n
\n\n\n
\n \n\n \n \n \n \n \n \n Measuring Indifference: Unit Interval Vertex Deletion.\n \n \n \n \n\n\n \n van Bevern, R.; Komusiewicz, C.; Moser, H.; and Niedermeier, R.\n\n\n \n\n\n\n In Thilikos, D. M., editor(s), WG 2010, volume 6410, of Lecture Notes in Computer Science, pages 232–243. Springer, 2010.\n \n\n\n\n
\n\n\n\n \n \n \"Measuring preprint\n  \n \n \n \"Measuring slides\n  \n \n\n \n \n doi\n  \n \n\n \n link\n  \n \n\n bibtex\n \n\n \n  \n \n abstract \n \n\n \n\n \n \n \n \n \n \n \n\n  \n \n \n \n \n \n \n\n\n\n
\n
@incollection{BKMN10,\n  title =\t {Measuring Indifference: Unit Interval Vertex\n                  Deletion},\n  author =\t {René van Bevern and Christian Komusiewicz and Hannes\n                  Moser and Rolf Niedermeier},\n  url_Preprint =\n                  {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/unit-interval-vertex-deletion-wg10.pdf},\n  doi =\t\t {10.1007/978-3-642-16926-7_22},\n  url_Slides =\n                  {http://rvb.su/pdf/unit-interval-vertex-deletion-talk-wg2010.pdf},\n  isbn =\t {978-3-642-16925-0},\n  year =\t {2010},\n  date =\t {2010-06-28},\n  booktitle =\t {WG 2010},\n  editor =\t {Dimitrios M. Thilikos},\n  volume =\t {6410},\n  pages =\t {232--243},\n  publisher =\t {Springer},\n  series =\t {Lecture Notes in Computer Science},\n  abstract =\t {Making a graph unit interval by a minimum number of\n                  vertex deletions is NP-hard. The problem is\n                  motivated by applications in seriation and measuring\n                  indifference between data items. We present a\n                  fixed-parameter algorithm based on the iterative\n                  compression technique that finds in\n                  O((14k+14)^(k+1)kn^6) time a set of k vertices whose\n                  deletion from an n-vertex graph makes it unit\n                  interval. Additionally, we show that making a graph\n                  chordal by at most k vertex deletions is NP-complete\n                  even on claw,net,tent-free graphs.},\n  keywords =\t {algorithmic graph theory, graph modification}\n}\n\n
\n
\n\n\n
\n Making a graph unit interval by a minimum number of vertex deletions is NP-hard. The problem is motivated by applications in seriation and measuring indifference between data items. We present a fixed-parameter algorithm based on the iterative compression technique that finds in O((14k+14)^(k+1)kn^6) time a set of k vertices whose deletion from an n-vertex graph makes it unit interval. Additionally, we show that making a graph chordal by at most k vertex deletions is NP-complete even on claw,net,tent-free graphs.\n
\n\n\n
\n\n\n\n\n\n
\n
\n\n\n\n\n
\n\n\n \n\n \n \n \n \n\n
\n"}; document.write(bibbase_data.data);