var bibbase_data = {"data":"\n\n
\n <script src=\"https://bibbase.org/show?bib=http%3A%2F%2Frvb.su%2Frvb.bib&jsonp=1&theme=mila&jsonp=1\"></script>\n
\n \n <?php\n $contents = file_get_contents(\"https://bibbase.org/show?bib=http%3A%2F%2Frvb.su%2Frvb.bib&jsonp=1&theme=mila\");\n print_r($contents);\n ?>\n
\n \n <iframe src=\"https://bibbase.org/show?bib=http%3A%2F%2Frvb.su%2Frvb.bib&jsonp=1&theme=mila\"></iframe>\n
\n \n For more details see the documentation.\n
\nTo the site owner:
\n\nAction required! Mendeley is changing its\n API. In order to keep using Mendeley with BibBase past April\n 14th, you need to:\n
\n \n \n Fix it now\n
\n@article{BKK+24,\n url =\t\t {https://jgaa.info/index.php/jgaa/article/view/2927},\n author =\t {van Bevern, René and Kanj, Iyad A. and Komusiewicz,\n Christian and Niedermeier, Rolf and Sorge, Manuel},\n keywords =\t {Discrete Mathematics (cs.DM), Combinatorics\n (math.CO), FOS: Computer and information sciences,\n FOS: Computer and information sciences, FOS:\n Mathematics, FOS: Mathematics},\n title =\t {The role of twins in computing planar supports of\n hypergraphs},\n journal =\t {Journal of Graph Algorithms and Applications},\n volume =\t 28,\n number =\t 1,\n pages =\t {51--79},\n year =\t {2024},\n}\n\n\n\n
@article{BS24,\n title =\t {A quadratic-order problem kernel for the traveling\n salesman problem parameterized by the vertex cover\n number},\n author =\t {René van Bevern and Daniel A. Skachkov},\n year =\t {2024},\n date =\t {2024-01},\n pages =\t {107065},\n volume =\t {52},\n journal =\t {Operations Research Letters},\n doi =\t\t {10.1016/j.orl.2024.107065},\n url_Preprint = {10.48550/arXiv.2207.08678}\n}\n\n\n\n\n
@article{BKS+24,\n url_Preprint = {10.48550/ARXIV.2109.06042},\n doi =\t\t {10.1016/j.jcss.2023.103479},\n author =\t {van Bevern, René and Kirilin, Artem M. and Skachkov,\n Daniel A. and Smirnov, Pavel V. and Tsidulko, Oxana},\n title =\t {Serial and parallel kernelization of Multiple\n Hitting Set parameterized by the Dilworth number,\n implemented on the GPU},\n journal =\t {Journal of Computer and System Sciences},\n year =\t {2024},\n date =\t {2024-02},\n volume =\t {139},\n pages =\t {103479}\n}\n\n\n\n\n
@article{BMST23,\n url_Preprint = {http://doi.org/10.48550/ARXIV.2205.08769},\n doi =\t\t {10.1016/j.orl.2023.06.005},\n author =\t {van Bevern, René and Melnikov, Andrey and Smirnov,\n Pavel and Tsidulko, Oxana},\n title =\t {On data reduction for dynamic vector bin packing},\n journal =\t {Operations Research Letters},\n volume =\t {51},\n pages =\t {446-452},\n year =\t {2023},\n date =\t {2023-07-04}\n}\n\n\n\n
@article{BBF+23,\n doi =\t\t {10.1016/j.dam.2022.11.018},\n url_Preprint = {https://doi.org/10.1016/j.dam.2022.11.018},\n year =\t {2023},\n month =\t mar,\n publisher =\t {Elsevier {BV}},\n volume =\t {328},\n pages =\t {117--133},\n author =\t {Matthias Bentert and Ren{\\'{e}} van Bevern and Till\n Fluschnik and Andr{\\'{e}} Nichterlein and Rolf\n Niedermeier},\n title =\t {Polynomial-time data reduction for weighted problems\n beyond additive goal functions},\n journal =\t {Discrete Applied Mathematics}\n}\n\n\n
@article{BBN+22,\n author =\t {{Bentert}, Matthias and René van Bevern and\n {Nichterlein}, André and {Niedermeier}, Rolf and\n Smirnov, Pavel V.},\n title =\t {Parameterized algorithms for power-efficiently\n connecting wireless sensor networks: Theory and\n experiments},\n journal =\t {INFORMS Journal on Computing},\n year =\t 2022,\n abstract =\t {We study an NP-hard problem motivated by\n energy-efficiently maintaining the connectivity of a\n symmetric wireless sensor communication network:\n Given an edge-weighted n-vertex graph, find a\n connected spanning subgraph of minimum cost, where\n the cost is determined by letting each vertex pay\n the most expensive edge incident to it in the\n subgraph. On the negative side, we show that\n o(logn)-approximating the difference d between the\n optimal solution cost and a natural lower bound is\n NP-hard and that, under the Exponential Time\n Hypothesis, there are no exact algorithms running in\n 2^o(n) time or in f(d)⋅n^O(1) time for any\n computable function f. On the positive side, we\n provide an algorithm that reconnects O(logn)\n connected components with minimum additional cost in\n polynomial time. These algorithms are motivated by\n application scenarios of monitoring areas or where\n an existing sensor network may fall apart into\n several connected components due to sensor\n faults. 
In experiments, the algorithm solves\n instances with four such connected components and\n about 8 000 vertices in five minutes, outperforming\n CPLEX with known ILP formulations for the problem.},\n url_Code =\t {https://gitlab.com/rvb/mpsc},\n url_Slides =\n {https://www.researchgate.net/publication/328262005_The_min-power_symmetric_connectivity_problem_with_few_obligatory_network_components},\n doi =\t\t {10.1287/ijoc.2020.1045},\n url_Preprint = {https://arxiv.org/abs/1706.03177},\n volume =\t 34,\n number =\t 1,\n pages =\t {55--75},\n date =\t {2022-03-22},\n keywords =\t {connected spanning subgraphs, monitoring areas,\n reconnecting sensor networks, parameterized\n complexity analysis, approximation hardness,\n parameterization above lower bounds, color-coding,\n experimental comparison},\n}\n\n\n\n
@article{BK21,\n doi =\t\t {10.1007/s00224-021-10033-0},\n year =\t {2021},\n month =\t apr,\n publisher =\t {Springer},\n author =\t {Ren{\\'{e}} van Bevern and Gregory Kucherov},\n title =\t {Special Issue on Computer Science Symposium in\n Russia (2019)},\n journal =\t {Theory of Computing Systems},\n volume =\t 65,\n number =\t 3,\n pages =\t {441--443},\n}\n\n\n
@article{ABT21,\n title =\t {The Hierarchical Chinese Postman Problem: the\n slightest disorder makes it hard, yet\n disconnectedness is manageable},\n author =\t {Vsevolod A. Afanasev and René van Bevern and Oxana\n Tsidulko},\n journal =\t {Operations Research Letters},\n year =\t 2021,\n volume =\t 49,\n number =\t 2,\n pages =\t {270--277},\n doi =\t\t {10.1016/j.orl.2021.01.017},\n date =\t {2021-02-05},\n url_Preprint = {https://arxiv.org/abs/2011.04022},\n url_Poster =\t {https://doi.org/10.6084/m9.figshare.13536797},\n url_Slides =\n {https://www.researchgate.net/publication/346675762_Hierarchical_Chinese_Postman_Problem_slightest_disorder_makes_it_hard_disconnectedness_is_manageable},\n keywords =\t {approximation algorithm, fixed-parameter algorithm,\n NP-hardness, arc routing, rural postman problem,\n temporal graphs},\n abstract =\t {The Hierarchical Chinese Postman Problem is finding\n a shortest traversal of all edges of a graph\n respecting precedence constraints given by a partial\n order on classes of edges. We show that the special\n case with connected classes is NP-hard even on\n orders decomposable into a chain and an incomparable\n class. For the case with linearly ordered (possibly\n disconnected) classes, we get 5/3-approximations and\n fixed-parameter algorithms by transferring results\n from the Rural Postman Problem.}\n}\n\n\n
@article{BTZ21,\n author =\t {Ren{\\'{e}} van Bevern and Oxana Tsidulko and Philipp\n Zschoche},\n title =\t {Representative families for matroid intersections,\n with applications to location, packing, and covering\n problems},\n year =\t {2021},\n journal =\t {Discrete Applied Mathematics},\n volume =\t {298},\n pages =\t {110-128},\n date =\t {2021-03-22},\n url_Preprint = {https://arxiv.org/abs/1806.11527},\n doi =\t\t {10.1016/j.dam.2021.03.014},\n url_Slides =\n {https://figshare.com/articles/presentation/Facility_location_under_matroid_constraints_fixed-parameter_algorithms_and_applications/13536788},\n abstract =\t {We show algorithms for computing representative\n families for matroid intersections and use them in\n fixed-parameter algorithms for set packing, set\n covering, and facility location problems with\n multiple matroid constraints. We complement our\n tractability results by hardness results. },\n}\n\n\n
@article{BS20b,\n title =\t {A historical note on the 3/2-approximation algorithm\n for the metric traveling salesman problem},\n author =\t {René van Bevern and Viktoriia A. Slugina},\n year =\t {2020},\n date =\t {2020-11-17},\n journal =\t {Historia Mathematica},\n url_Preprint = {https://arxiv.org/abs/2004.02437},\n url_Poster =\n {https://figshare.com/articles/poster/Parallel_Roads_to_the_Traveling_Salesman_3_2-Approximation/12728723},\n url_Slides =\n {https://www.researchgate.net/publication/348297719_Algoritm_Kristofidesa_ili_Kristofidesa-Serdukova_Istoria_otkrytia_i_publikacii},\n volume =\t 53,\n pages =\t {118-127},\n doi =\t\t {10.1016/j.hm.2020.04.003},\n abstract =\t {One of the most fundamental results in combinatorial\n optimization is the polynomial-time\n 3/2-approximation algorithm for the metric traveling\n salesman problem. It was presented by Christofides\n in 1976 and is well known as "the Christofides\n algorithm". Recently, some authors started calling\n it "Christofides-Serdyukov algorithm", pointing out\n that the same result was published independently in\n the USSR in 1978. We provide some historic\n background on Serdyukov's findings and a translation\n of his article.}\n}\n\n\n\n\n
@article{BKM+20,\n title =\t {H-Index Manipulation by Undoing Merges},\n author =\t {René van Bevern and Christian Komusiewicz and\n Hendrik Molter and Rolf Niedermeier and Manuel Sorge\n and Toby Walsh},\n doi =\t\t {10.1162/qss_a_00093},\n url_Preprint = {http://arxiv.org/abs/1604.04827},\n url_Code =\t {https://gitlab.com/rvb/split-index},\n url_Poster =\n {https://figshare.com/articles/poster/H-index_manipulation_by_undoing_merges/13521758},\n journal =\t {Quantitative Science Studies},\n volume =\t 1,\n number =\t 4,\n pages =\t {1529--1552},\n abstract =\t {The h-index is an important bibliographic measure\n used to assess the performance of\n researchers. Dutiful researchers merge different\n versions of their articles in their Google Scholar\n profile even though this can decrease their\n h-index. In this article, we study the manipulation\n of the h-index by undoing such merges. In contrast\n to manipulation by merging articles (van Bevern et\n al. [Artif. Intel. 240:19-35, 2016]) such\n manipulation is harder to detect. We present\n numerous results on computational complexity (from\n linear-time algorithms to parameterized\n computational hardness results) and empirically\n indicate that at least small improvements of the\n h-index by splitting merged articles are\n unfortunately easily achievable.},\n year =\t 2020,\n date =\t {2020-10-13},\n}\n\n\n\n\n\n\n
@article{BFT20b,\n author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n Tsidulko},\n title =\t {On approximate data reduction for the {Rural Postman\n Problem}: Theory and experiments},\n year =\t {2020},\n date =\t {2020-11-02},\n journal =\t {Networks},\n url_Preprint = {https://arxiv.org/abs/1812.10131},\n url_Poster =\n {https://figshare.com/articles/poster/On_approximate_data_reduction_for_the_Rural_Postman_Problem/13058969},\n url_Slides =\n {https://www.researchgate.net/publication/348297308_On_approximate_approximate_data_reduction_for_the_Rural_Postman_Problem_Theory_and_Experiments},\n doi =\t\t {10.1002/net.21985},\n volume =\t 76,\n number =\t 4,\n pages =\t {485-508},\n url_Code =\t {https://gitlab.com/rvb/rpp-psaks},\n abstract =\t {Given a graph G=(V,E) with edge weights ω:E→N∪{0}\n and a subset R⊆E of required edges, the Rural\n Postman Problem (RPP) is to find a closed walk W* of\n minimum weight ω(W*) containing all edges of R. We\n prove that RPP is WK[1]-complete parameterized by\n the number and cost d=ω(W^*)-ω(R)+|W*|-|R| of edges\n traversed additionally to the required ones, that\n is, presumably cannot be polynomial-time reduced to\n solving instances of size polynomial in d. In\n contrast, denoting by b≤2d the number of vertices\n incident to an odd number of edges of R and by c≤d\n the number of connected components formed by the\n edges in R, we show how to reduce any RPP instance I\n to an RPP instance I' with 2b+O(c/ε) vertices in\n O(n³) time so that any α-approximate solution for I'\n gives an α(1+ε)-approximate solution for I, for any\n α≥1 and ε>0. That is, we provide a polynomial-size\n approximate kernelization scheme (PSAKS). We make\n first steps towards a PSAKS for the parameter c.}\n}\n\n\n
@article{BS20,\n title =\t {Optimal-size problem kernels for $d$-Hitting Set in\n linear time and space},\n author =\t {René van Bevern and Pavel V. Smirnov},\n year =\t {2020},\n date =\t {2020-10-01},\n journal =\t {Information Processing Letters},\n volume =\t {163},\n pages =\t {105998},\n doi =\t\t {10.1016/j.ipl.2020.105998},\n url_Preprint = {https://arxiv.org/abs/2003.04578},\n url_Code =\t {https://gitlab.com/PavelSmirnov/hs-lintimespace},\n url_Poster =\t {https://figshare.com/articles/poster/Optimal-size_problem_kernels_for_d-Hitting_Set_in_linear_time_and_space/12782975},\n abstract =\t {We improve two linear-time data reduction algorithms\n for the d-Hitting Set problem to work in linear\n space, thus obtaining the first algorithms for\n computing problem kernels of asymptotically optimal\n size O(k^d) for d-Hitting Set in linear time and\n space. We experimentally compare the two algorithms\n to a classical data reduction algorithm of Weihe and\n evaluate their combinations.}\n}\n\n\n\n\n
@article{BFT20,\n author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n Tsidulko},\n title =\t {Parameterized algorithms and data reduction for the\n short secluded $s$-$t$-path problem},\n journal =\t {Networks},\n year =\t 2020,\n date =\t {2020-01-01},\n volume =\t {75},\n number =\t {1},\n pages =\t {34-63},\n url_Preprint = {https://arxiv.org/abs/1806.09540},\n doi =\t\t {10.1002/net.21904},\n abstract =\t {Given a graph G=(V,E), two vertices s,t∈V, and two\n integers k,ℓ, we search for a simple s-t-path with\n at most k vertices and at most ℓ neighbors. For\n graphs with constant crossing number, we provide a\n subexponential 2^O(√n)-time algorithm, prove a\n matching lower bound, and show a polynomial-time\n data reduction algorithm that reduces any problem\n instance to an equivalent instance (a so-called\n problem kernel) of size polynomial in the vertex\n cover number of the input graph. In contrast, we\n show that the problem in general graphs is hard to\n preprocess. We obtain a 2^O(ω)⋅ℓ^2⋅n-time algorithm\n for graphs of treewidth ω, show that there is no\n problem kernel with size polynomial in ω, yet show a\n problem kernels with size polynomial in the feedback\n edge number of the input graph and with size\n polynomial in the feedback vertex number, k, and ℓ.}\n}\n\n\n
@proceedings{BK19,\n editor =\t {Ren{\\'{e}} van Bevern and Gregory Kucherov},\n title =\t {CSR 2019},\n series =\t {Lecture Notes in Computer Science},\n volume =\t {11532},\n publisher =\t {Springer},\n year =\t {2019},\n doi =\t\t {10.1007/978-3-030-19955-5},\n timestamp =\t {Tue, 25 Jun 2019 14:30:35 +0200},\n biburl =\t {https://dblp.org/rec/bib/conf/csr/2019},\n bibsource =\t {dblp computer science bibliography,\n https://dblp.org}\n}\n\n\n\n\n
@article{BBN19,\n author =\t {Matthias Bentert and Ren{\\'{e}} van Bevern and Rolf\n Niedermeier},\n journal =\t {Journal of Scheduling},\n date =\t {2019-02-01},\n title =\t {Inductive $k$-independent graphs and $c$-colorable\n subgraphs in scheduling: A review},\n year =\t 2019,\n doi =\t\t {10.1007/s10951-018-0595-8},\n volume =\t {22},\n number =\t 1,\n pages =\t {3--20},\n url_Preprint = {https://arxiv.org/abs/1712.06481},\n url_Poster =\n {https://www.researchgate.net/publication/342122158_Wireless_Scheduling_Graph_Classes_and_c-Colorable_Subgraphs},\n abstract =\t {Inductive k-independent graphs generalize chordal\n graphs and have recently been advocated in the\n context of interference-avoiding wireless\n communication scheduling. The NP-hard problem of\n finding maximum-weight induced c-colorable\n subgraphs, which is a generalization of finding\n maximum independent sets, naturally occurs when\n selecting c sets of pairwise non-conflicting jobs\n (modeled as graph vertices). We investigate the\n parameterized complexity of this problem on\n inductive k-independent graphs. We show that the\n Independent Set problem is W[1]-hard even on\n 2-simplicial 3-minoes---a subclass of inductive\n 2-independent graphs. In contrast, we prove that the\n more general Maximum c-Colorable Subgraph problem is\n fixed-parameter tractable on edge-wise unions of\n cluster and chordal graphs, which are\n 2-simplicial. In both cases, the parameter is the\n solution size. Aside from this, we survey other\n graph classes between inductive 1-inductive and\n inductive 2-inductive graphs with applications in\n scheduling.}\n}\n\n\n\n
@article{BPS19,\n author =\t {René A. van Bevern and Artem V. Pyatkin and Sergey\n V. Sevastyanov},\n journal =\t {Siberian Electronic Mathematical Reports},\n title =\t {An algorithm with parameterized complexity of\n constructing the optimal schedule for the routing\n open shop problem with unit execution times},\n date =\t {2019-01-27},\n year =\t 2019,\n volume =\t 16,\n pages =\t {42--84},\n doi =\t\t {10.33048/semi.2019.16.003},\n issn =\t {1813-3304},\n abstract =\t {For the Routing Open Shop problem with unit\n execution times, the first algorithm with\n parameterized complexity is designed for\n constructing an optimal schedule. Its running time\n is bounded by a function (Pol(|V |) + f(m, g))·|I|,\n where Pol(|V|) is a polynomial of the number of\n network nodes, f(m, g) is a function of the number\n of machines and the number of job locations, and |I|\n is the input length in its compact encoding.}\n}\n\n\n
@incollection{BFT19,\n volume =\t {11548},\n pages =\t {279--294},\n author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n Tsidulko},\n title =\t {On $(1+\\varepsilon)$-approximate data reduction for\n the {Rural Postman Problem}},\n year =\t 2019,\n date =\t {2019-07-06},\n booktitle =\t {MOTOR 2019},\n editor =\t {Michael Khachay and Yury Kochetov and Panos\n Pardalos},\n series =\t {Lecture Notes in Computer Science},\n publisher =\t {Springer},\n url_Preprint = {https://arxiv.org/abs/1812.10131v1},\n url_Slides =\n {https://www.researchgate.net/publication/342122216_On_1e-approximate_data_reduction_for_the_Rural_Postman_Problem},\n doi =\t\t {10.1007/978-3-030-22629-9_20},\n abstract =\t {Given a graph G=(V,E) with edge weights ω:E→N∪{0}\n and a subset R⊆E of required edges, the Rural\n Postman Problem (RPP) is to find a closed walk W* of\n minimum weight ω(W*) containing all edges of R. We\n prove that RPP is WK[1]-complete parameterized by\n the number and cost d=ω(W^*)-ω(R)+|W*|-|R| of edges\n traversed additionally to the required ones, that\n is, presumably cannot be polynomial-time reduced to\n solving instances of size polynomial in d. In\n contrast, denoting by b≤2d the number of vertices\n incident to an odd number of edges of R and by c≤d\n the number of connected components formed by the\n edges in R, we show how to reduce any RPP instance I\n to an RPP instance I' with 2b+O(c/ε) vertices in\n O(n³) time so that any α-approximate solution for I'\n gives an α(1+ε)-approximate solution for I, for any\n α≥1 and ε>0. That is, we provide a polynomial-size\n approximate kernelization scheme (PSAKS). We make\n first steps towards a PSAKS for the parameter c.}\n}\n\n\n\n\n\n
@incollection{BTZ19,\n author =\t {Ren{\\'{e}} van Bevern and Oxana Tsidulko and Philipp\n Zschoche},\n year =\t "2019",\n pages =\t "62--74",\n title =\t {Fixed-parameter algorithms for maximum-profit\n facility location under matroid constraints},\n editor =\t {Pinar Heggernes},\n booktitle =\t {CIAC 2019},\n series =\t {Lecture Notes in Computer Science},\n publisher =\t {Springer},\n volume =\t {11485},\n date =\t {2019-05-27},\n doi =\t\t {10.1007/978-3-030-17402-6_6},\n abstract =\t {We consider a variant of the matroid median problem\n introduced by Krishnaswamy et al. [SODA 2011]: an\n uncapacitated discrete facility location problem\n where the task is to decide which facilities to open\n and which clients to serve for maximum profit so\n that the facilities form an independent set in given\n facility-side matroids and the clients form an\n independent set in given client-side matroids. We\n show that the problem is fixed-parameter tractable\n parameterized by the number of matroids and the\n minimum rank among the client-side matroids. To this\n end, we derive fixed-parameter algorithms for\n computing representative families for matroid\n intersections and maximum-weight set packings with\n multiple matroid constraints. To illustrate the\n modeling capabilities of the new problem, we use it\n to obtain algorithms for a problem in social network\n analysis. We complement our tractability results by\n lower bounds.}\n}\n\n\n\n
@article{BFM+18,\n title =\t {The parameterized complexity of finding secluded\n solutions to some classical optimization problems on\n graphs},\n author =\t {René van Bevern and Till Fluschnik and George\n B. Mertzios and Hendrik Molter and Manuel Sorge and\n Ondřej Suchý},\n date =\t {2018-11-01},\n year =\t 2018,\n journal =\t {Discrete Optimization},\n volume =\t 30,\n pages =\t {20--50},\n url_Preprint = {https://arxiv.org/abs/1606.09000},\n doi =\t\t {10.1016/j.disopt.2018.05.002},\n abstract =\t {This work studies the parameterized complexity of\n finding secluded solutions to classical\n combinatorial optimization problems on graphs such\n as finding minimum s-t separators, feedback vertex\n sets, dominating sets, maximum independent sets, and\n vertex deletion problems for hereditary graph\n properties: Herein, one searches not only to\n minimize or maximize the size of the solution, but\n also to minimize the size of its neighborhood. This\n restriction has applications in secure routing and\n community detection.}\n}\n\n\n\n
@incollection{BFT18,\n author =\t {Ren{\\'{e}} van Bevern and Till Fluschnik and Oxana\n Tsidulko},\n title =\t {Parameterized algorithms and data reduction for safe\n convoy routing},\n year =\t 2018,\n date =\t {2018-08-29},\n doi =\t\t {10.4230/OASIcs.ATMOS.2018.10},\n editor =\t {Ralf Borndörfer and Sabine Storandt},\n booktitle =\t {ATMOS 2018},\n publisher =\t {Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik},\n series =\t {OpenAccess Series in Informatics (OASIcs)},\n pages =\t {10:1--10:19},\n volume =\t 65,\n keywords =\t {NP-hard problem, fixed-parameter tractability,\n problem kernelization, shortest path, secluded\n solution},\n abstract =\t {We study a problem that models safely routing a\n convoy through a transportation network, where any\n vertex adjacent to the travel path of the convoy\n requires additional precaution: Given a graph\n G=(V,E), two vertices s,t∈V, and two integers k,ℓ,\n we search for a simple s-t-path with at most k\n vertices and at most ℓ neighbors. We study the\n problem in two types of transportation networks:\n graphs with small crossing number, as formed by road\n networks, and tree-like graphs, as formed by\n waterways. For graphs with constant crossing number,\n we provide a subexponential 2^O(√n)-time algorithm\n and prove a matching lower bound. We also show a\n polynomial-time data reduction algorithm that\n reduces any problem instance to an equivalent\n instance (a so-called problem kernel) of size\n polynomial in the vertex cover number of the input\n graph. In contrast, we show that the problem in\n general graphs is hard to preprocess. Regarding\n tree-like graphs, we obtain a 2^O(tw)⋅ℓ^2⋅n-time\n algorithm for graphs of treewidth tw, show that\n there is no problem kernel with size polynomial in\n tw, yet show a problem kernel with size polynomial\n in the feedback edge number of the input graph.}\n}\n\n\n\n
@article{MB18,\n title =\t "Parameterized complexity of machine scheduling: 15\n open problems",\n journal =\t "Computers \\& Operations Research",\n volume =\t "100",\n pages =\t "254--261",\n year =\t "2018",\n issn =\t "0305-0548",\n doi =\t\t "10.1016/j.cor.2018.07.020",\n author =\t "Matthias Mnich and René van Bevern",\n keywords =\t "Parallel machines, Shop scheduling, Makespan, Total\n completion time, Total tardiness, Throughput, Number\n of tardy jobs",\n abstract =\t "Machine scheduling problems are a long-time key\n domain of algorithms and complexity research. A\n novel approach to machine scheduling problems are\n fixed-parameter algorithms. To stimulate this\n thriving research direction, we propose 15 open\n questions in this area whose resolution we expect to\n lead to the discovery of new approaches and\n techniques both in scheduling and parameterized\n complexity theory.",\n date =\t {2018-08-16},\n url_Preprint = {http://arxiv.org/abs/1709.01670},\n url_Slides =\n {https://www.researchgate.net/publication/342122033_Open_problems_on_the_parameterized_complexity_of_scheduling_problems}\n}\n\n\n\n\n
@article{BFK18,\n title =\t {Parameterizing edge modification problems above\n lower bounds},\n author =\t {René van Bevern and Vincent Froese and Christian\n Komusiewicz},\n url_Preprint = {http://arxiv.org/abs/1512.04047},\n doi =\t\t {10.1007/s00224-016-9746-5},\n url_Slides =\t {http://rvb.su/pdf/above-guarantee-editing-csr16.pdf},\n year =\t 2018,\n date =\t {2018-04-01},\n journal =\t {Theory of Computing Systems},\n abstract =\t {We study the parameterized complexity of a variant\n of the F-free Editing problem: Given a graph G and a\n natural number k, is it possible to modify at most k\n edges in G so that the resulting graph contains no\n induced subgraph isomorphic to F? In our variant,\n the input additionally contains a vertex-disjoint\n packing H of induced subgraphs of G, which provides\n a lower bound h(H) on the number of edge\n modifications required to transform G into an F-free\n graph. While earlier works used the number k as\n parameter or structural parameters of the input\n graph G, we consider instead the parameter\n l:=k-h(H), that is, the number of edge modifications\n above the lower bound h(H). We develop a framework\n of generic data reduction rules to show\n fixed-parameter tractability with respect to l for\n K_3-Free Editing, Feedback Arc Set in Tournaments,\n and Cluster Editing when the packing H contains\n subgraphs with bounded solution size. For K_3-Free\n Editing, we also prove NP-hardness in case of\n edge-disjoint packings of K_3s and l=0, while for\n K_q-Free Editing and q ≥ 6, NP-hardness for l=0 even\n holds for vertex-disjoint packings of K_qs. In\n addition, we provide NP-hardness results for F-free\n Vertex Deletion, where the aim is to delete a minimum\n number of vertices to make the input graph F-free.},\n keywords =\t {algorithmic graph theory, graph modification,\n invited, network analysis, NP-hard, parameterized\n complexity},\n volume =\t "62",\n number =\t "3",\n pages =\t "739--770",\n}\n\n\n
@incollection{BBNN17,\n author =\t {{Bentert}, M. and R. van Bevern and {Nichterlein},\n A. and {Niedermeier}, R.},\n title =\t {Parameterized algorithms for power-efficient\n connected symmetric wireless sensor networks},\n booktitle =\t {ALGOSENSORS 2017},\n editor =\t {Antonio Fernández Anta and Tomasz Jurdzinski and\n Miguel A. Mosteiro and Yanyong Zhang},\n year =\t 2017,\n series =\t {Lecture Notes in Computer Science},\n volume =\t {10718},\n pages =\t "26--40",\n doi =\t\t "10.1007/978-3-319-72751-6_3",\n url_Slides =\n {https://www.researchgate.net/publication/328262005_The_min-power_symmetric_connectivity_problem_with_few_obligatory_network_components},\n publisher =\t {Springer},\n date =\t {2017-12-31},\n abstract =\t {We study an NP-hard problem motivated by\n energy-efficiently maintaining the connectivity of a\n symmetric wireless sensor communication\n network. Given an edge-weighted n-vertex graph, find\n a connected spanning subgraph of minimum cost, where\n the cost is determined by letting each vertex pay\n the most expensive edge incident to it in the\n subgraph. We provide an algorithm that works in\n polynomial time if one can find a set of obligatory\n edges that yield a spanning subgraph with O(logn)\n connected components. We also provide a linear-time\n algorithm that reduces any input graph that consists\n of a tree together with g additional edges to an\n equivalent graph with O(g) vertices. Based on this,\n we obtain a polynomial-time algorithm for\n g∈O(logn). On the negative side, we show that\n o(logn)-approximating the difference d between the\n optimal solution cost and a natural lower bound is\n NP-hard and that there are presumably no exact\n algorithms running in 2o(n) time or in f(d)⋅nO(1)\n time for any computable function f.}\n}\n\n\n\n
@article{BKS17,\n title =\t {A parameterized approximation algorithm for the\n mixed and windy Capacitated Arc Routing Problem:\n theory and experiments},\n author =\t {René van Bevern and Christian Komusiewicz and Manuel\n Sorge},\n journal =\t {Networks},\n volume =\t {70},\n number =\t {3},\n issn =\t {1097-0037},\n doi =\t\t {10.1002/net.21742},\n pages =\t {262--278},\n keywords =\t {vehicle routing, transportation, Rural Postman,\n Chinese Postman, NP-hard problem, fixed-parameter\n algorithm, combinatorial optimization},\n year =\t {2017},\n url_Preprint = {https://arxiv.org/abs/1506.05620},\n url_Slides =\n {http://rvb.su/pdf/approximation-directed-arc-routing-atmos15.pdf},\n url_Code =\t {https://gitlab.com/rvb/mwcarp-approx},\n date =\t {2017-10-01},\n abstract =\t {We prove that any polynomial-time α(n)-approximation\n algorithm for the n-vertex metric asymmetric\n Traveling Salesperson Problem yields a\n polynomial-time O(α(C))-approximation algorithm for\n the mixed and windy Capacitated Arc Routing Problem,\n where C is the number of weakly connected components\n in the subgraph induced by the positive-demand\n arcs---a small number in many applications. In\n conjunction with known results, we obtain\n constant-factor approximations for C∈O(log n) and\n O(log C/log log C)-approximations in general.\n Experiments show that our algorithm, together with\n several heuristic enhancements, outperforms many\n previous polynomial-time heuristics. Finally, since\n the solution quality achievable in polynomial time\n appears to mainly depend on C and since C=1 in\n almost all benchmark instances, we propose the Ob\n benchmark set, simulating cities that are divided\n into several components by a river.}\n}\n\n\n
@incollection{BFM+17,\n author =\t {René van Bevern and Till Fluschnik and George\n B. Mertzios and Hendrik Molter and Manuel Sorge and\n Ondřej Suchý},\n title =\t {Finding Secluded Places of Special Interest in\n Graphs},\n date =\t {2017-02-10},\n booktitle =\t {IPEC 2016},\n editor =\t {Jiong Guo and Danny Hermelin},\n abstract =\t {Finding a vertex subset in a graph that satisfies a\n certain property is one of the most-studied topics\n in algorithmic graph theory. The focus herein is\n often on minimizing or maximizing the size of the\n solution, that is, the size of the desired vertex\n set. In several applications, however, we also want\n to limit the "exposure" of the solution to the rest\n of the graph. This is the case, for example, when\n the solution represents persons that ought to deal\n with sensitive information or a segregated\n community. In this work, we thus explore the\n (parameterized) complexity of finding such secluded\n vertex subsets for a wide variety of properties that\n they shall fulfill. More precisely, we study the\n constraint that the (open or closed) neighborhood of\n the solution shall be bounded by a parameter and the\n influence of this constraint on the complexity of\n minimizing separators, feedback vertex sets, F-free\n vertex deletion sets, dominating sets, and the\n maximization of independent sets.},\n pages =\t {5:1--5:16},\n series =\t {Leibniz International Proceedings in Informatics\n (LIPIcs)},\n ISBN =\t {978-3-95977-023-1},\n ISSN =\t {1868-8969},\n year =\t {2017},\n volume =\t {63},\n publisher =\t {Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik},\n doi =\t\t {10.4230/LIPIcs.IPEC.2016.5},\n}\n\n\n
@article{BBB+17,\n title =\t {Partitioning perfect graphs into stars},\n author =\t {René van Bevern and Robert Bredereck and Laurent\n Bulteau and Jiehua Chen and Vincent Froese and Rolf\n Niedermeier and Gerhard J. Woeginger},\n url_Preprint = {http://arxiv.org/abs/1402.2589},\n doi =\t\t {10.1002/jgt.22062},\n year =\t 2017,\n date =\t {2017-06-01},\n journal =\t {Journal of Graph Theory},\n abstract =\t {The partition of graphs into nice subgraphs is a\n central algorithmic problem with strong ties to\n matching theory. We study the partitioning of\n undirected graphs into stars of the same size, a\n problem known to be NP-complete even for the case of\n stars on three vertices. We perform a thorough\n computational complexity study of the problem on\n subclasses of perfect graphs and identify several\n polynomial-time solvable cases, for example, on\n interval graphs and bipartite permutation graphs,\n and also NP-complete cases, for example, on grid\n graphs and chordal graphs.},\n volume =\t 85,\n number =\t 2,\n pages =\t {297--335},\n keywords =\t {algorithmic graph theory}\n}\n\n\n
@article{BBC+17,\n title =\t "Fixed-parameter algorithms for {DAG} Partitioning",\n journal =\t "Discrete Applied Mathematics",\n volume =\t 220,\n pages =\t "134--160",\n year =\t 2017,\n issn =\t "0166-218X",\n doi =\t\t "10.1016/j.dam.2016.12.002",\n author =\t "René van Bevern and Robert Bredereck and Morgan\n Chopin and Sepp Hartung and Falk Hüffner and André\n Nichterlein and Ondřej Suchý",\n keywords =\t "NP-hard problem",\n keywords =\t "Graph algorithms",\n keywords =\t "Polynomial-time data reduction",\n keywords =\t "Multiway cut",\n keywords =\t "Linear-time algorithms",\n keywords =\t "Algorithm engineering",\n keywords =\t "Evaluating heuristics ",\n abstract =\t "Abstract Finding the origin of short phrases\n propagating through the web has been formalized by\n Leskovec et al. (2009) as \\{DAG\\} Partitioning:\n given an arc-weighted directed acyclic graph on n\n vertices and m arcs, delete arcs with total weight\n at most k such that each resulting weakly-connected\n component contains exactly one sink—a vertex without\n outgoing arcs. \\{DAG\\} Partitioning is NP-hard. We\n show an algorithm to solve \\{DAG\\} Partitioning in O\n ( 2 k ⋅ ( n + m ) ) time, that is, in linear time\n for fixed k . We complement it with linear-time\n executable data reduction rules. Our experiments\n show that, in combination, they can optimally solve\n \\{DAG\\} Partitioning on simulated citation networks\n within five minutes for k ≤ 190 and m being 10 7\n and larger. We use our obtained optimal solutions to\n evaluate the solution quality of Leskovec et al.’s\n heuristic. We show that Leskovec et al.’s heuristic\n works optimally on trees and generalize this result\n by showing that \\{DAG\\} Partitioning is solvable in\n 2 O ( t 2 ) ⋅ n time if a width- t tree\n decomposition of the input graph is given. Thus, we\n improve an algorithm and answer an open question of\n Alamdari and Mehrabian (2012). 
We complement our\n algorithms by lower bounds on the running time of\n exact algorithms and on the effectivity of data\n reduction. ",\n url_Preprint = {https://arxiv.org/abs/1611.08809},\n url_Code =\t {https://gitlab.com/rvb/dagpart/}\n}\n\n\n\n
@article{BNS17,\n title =\t {A parameterized complexity view on non-preemptively\n scheduling interval-constrained jobs: few machines,\n small looseness, and small slack},\n author =\t {René van Bevern and Rolf Niedermeier and Ondřej\n Suchý},\n url_Preprint = {http://arxiv.org/abs/1508.01657},\n doi =\t\t {10.1007/s10951-016-0478-9},\n url_Slides =\t {http://rvb.su/pdf/looseness-slack-door16.pdf},\n issn =\t {1094-6136},\n year =\t 2017,\n date =\t {2017-06-01},\n journal =\t {Journal of Scheduling},\n publisher =\t {Springer},\n abstract =\t {We study the problem of non-preemptively scheduling\n n jobs, each job j with a release time t_j, a\n deadline d_j, and a processing time p_j, on m\n parallel identical machines. Cieliebak et\n al. considered the two constraints |d_j-t_j| ≤ λp_j\n and |d_j-t_j| ≤ p_j+σ and showed the problem to be\n NP-hard for any λ>1 and for any σ≥2. We complement\n their results by parameterized complexity studies:\n we show that, for any λ>1, the problem remains\n weakly NP-hard even for m=2 and strongly W[1]-hard\n parameterized by m. We present a\n pseudo-polynomial-time algorithm for constant m and\n λ and a fixed-parameter tractability result for the\n parameter m combined with σ.},\n volume =\t 20,\n number =\t 3,\n pages =\t {255--265}\n}\n\n\n
@incollection{BKK+16,\n title =\t {Twins in Subdivision Drawings of Hypergraphs},\n author =\t {René van Bevern and Iyad Kanj and Christian\n Komusiewicz and Rolf Niedermeier and Manuel Sorge},\n year =\t 2016,\n date =\t {2016-12-08},\n booktitle =\t {GD 2016},\n editor =\t {Yifan Hu and Martin Nöllenburg},\n volume =\t {9801},\n series =\t {Lecture Notes in Computer Science},\n publisher =\t {Springer},\n url_Preprint = {http://arxiv.org/abs/1511.09389v4},\n abstract =\t {Visualizing hypergraphs, systems of subsets of some\n universe, has continuously attracted research\n interest in the last decades. We study a natural\n kind of hypergraph visualization called subdivision\n drawings. Dinkla et al. [Comput. Graph. Forum ’12]\n claimed that only few hypergraphs have a subdivision\n drawing. However, this statement seems to be based\n on the assumption (also used in previous work) that\n the input hypergraph does not contain twins, pairs\n of vertices which are in precisely the same\n hyperedges (subsets of the universe). We show that\n such vertices may be necessary for a hypergraph to\n admit a subdivision drawing. As a counterpart, we\n show that the number of such “necessary twins” is\n upper-bounded by a function of the number m of\n hyperedges and a further parameter r of the desired\n drawing related to its number of layers. This leads\n to a linear-time algorithm for determining such\n subdivision drawings if m and r are constant; in\n other words, the problem is linear-time\n fixed-parameter tractable with respect to the\n parameters m and r.},\n keywords =\t {algorithmic graph theory, graph modification},\n pages =\t "67--80",\n isbn =\t "978-3-319-50106-2",\n doi =\t\t "10.1007/978-3-319-50106-2_6",\n}\n\n\n
@article{BKN+16,\n title =\t {H-Index Manipulation by Merging Articles: Models,\n Theory, and Experiments},\n author =\t {René van Bevern and Christian Komusiewicz and Rolf\n Niedermeier and Manuel Sorge and Toby Walsh},\n url_Preprint = {http://arxiv.org/abs/1412.5498},\n url_Code =\t {http://fpt.akt.tu-berlin.de/hindex/},\n year =\t 2016,\n date =\t {2016-11-01},\n journal =\t {Artificial Intelligence},\n volume =\t 240,\n pages =\t {19--35},\n abstract =\t {An author's profile on Google Scholar consists of\n indexed articles and associated data, such as the\n number of citations and the H-index. The author is\n allowed to merge articles, which may affect the\n H-index. We analyze the (parameterized)\n computational complexity of maximizing the H-index\n using article merges. Herein, to model realistic\n manipulation scenarios, we define a compatibility\n graph whose edges correspond to plausible\n merges. Moreover, we consider several different\n measures for computing the citation count of a\n merged article. For the measure used by Google\n Scholar, we give an algorithm that maximizes the\n H-index in linear time if the compatibility graph\n has constant-size connected components. In contrast,\n if we allow to merge arbitrary articles (that is,\n for arbitrary compatibility graphs), then already\n increasing the H-index by one is\n NP-hard. Experiments on Google Scholar profiles of\n AI researchers show that the H-index can be\n manipulated substantially only if one merges\n articles with highly dissimilar titles.},\n doi =\t\t {10.1016/j.artint.2016.08.001},\n keywords =\t {network analysis, parameterized complexity}\n}\n\n\n
@incollection{BBB+16,\n title =\t {Precedence-constrained scheduling problems\n parameterized by partial order width},\n author =\t {René van Bevern and Robert Bredereck and Laurent\n Bulteau and Christian Komusiewicz and Nimrod Talmon\n and Gerhard J. Woeginger},\n url_Preprint = {http://arxiv.org/abs/1605.00901},\n doi =\t\t {10.1007/978-3-319-44914-2_9},\n url_Slides =\t {http://rvb.su/pdf/p-prec-cmax-door16.pdf},\n year =\t {2016},\n date =\t {2016-09-19},\n booktitle =\t {DOOR 2016},\n editor =\t {Yury Kochetov and Michael Khachay and Vladimir\n Beresnev and Evgeni Nurminski and Panos Pardalos},\n publisher =\t {Springer},\n abstract =\t {Negatively answering a question posed by Mnich and\n Wiese (Math. Program. 154(1-2):533-562), we show\n that P2|prec,pj∈{1,2}|Cmax, the problem of finding a\n non-preemptive minimum-makespan schedule for\n precedence-constrained jobs of lengths 1 and 2 on\n two parallel identical machines, is W[2]-hard\n parameterized by the width of the partial order\n giving the precedence constraints. To this end, we\n show that Shuffle Product, the problem of deciding\n whether a given word can be obtained by interleaving\n the letters of k other given words, is W[2]-hard\n parameterized by k, thus additionally answering a\n question posed by Rizzi and Vialette (CSR\n 2013). Finally, refining a geometric algorithm due\n to Servakh\n (Diskretn. Anal. Issled. Oper. 7(1):75-82), we show\n that the more general Resource-Constrained Project\n Scheduling problem is fixed-parameter tractable\n parameterized by the partial order width combined\n with the maximum allowed difference between the\n earliest possible and factual starting time of a\n job. },\n pages =\t {105-120},\n volume =\t {9869},\n series =\t {Lecture Notes in Computer Science},\n keywords =\t {NP-hard, parameterized complexity, scheduling}\n}\n\n\n
@incollection{BKM+16,\n title =\t {H-Index Manipulation by Undoing Merges},\n author =\t {René van Bevern and Christian Komusiewicz and\n Hendrik Molter and Rolf Niedermeier and Manuel Sorge\n and Toby Walsh},\n doi =\t\t {10.3233/978-1-61499-672-9-895},\n year =\t {2016},\n date =\t {2016-08-29},\n booktitle =\t {ECAI 2016},\n editor =\t {Gal A. Kaminka and Maria Fox and Paolo Bouquet and\n Eyke Hüllermeier and Virginia Dignum and Frank\n Dignum and Frank van Harmelen},\n publisher =\t {IOS Press},\n abstract =\t {The h-index is one of the most important\n bibliographic measures used to assess the\n performance of researchers. Van Bevern et\n al. [Artif. Intel., in press] showed that, despite\n computational worst-case hardness results,\n substantial manipulation of the h-index of Google\n Scholar author profiles is possible by merging\n articles. Complementing previous work, we study the\n opposite operation, the splitting of articles, which\n is arguably the more natural operation for\n manipulation and which is also allowed within Google\n Scholar. We present numerous results on\n computational complexity (from linear-time\n algorithms to parameterized hardness results) and\n empirically indicate that at least small\n improvements of the h-index by splitting merged\n articles are easily achievable.},\n volume =\t {285},\n pages =\t {895--903},\n series =\t {Frontiers in Artificial Intelligence and\n Applications},\n keywords =\t {graph modification, NP-hard, parameterized\n complexity}\n}\n\n\n\n
@incollection{BFK16,\n title =\t {Parameterizing edge modification problems above\n lower bounds},\n doi =\t\t {10.1007/978-3-319-34171-2_5},\n author =\t {René van Bevern and Vincent Froese and Christian\n Komusiewicz},\n url_Slides =\t {http://rvb.su/pdf/above-guarantee-editing-csr16.pdf},\n isbn =\t {978-3-319-34170-5},\n year =\t {2016},\n date =\t {2016-05-31},\n booktitle =\t {CSR 2016},\n editor =\t {Alexander S. Kulikov and Gerhard J. Woeginger},\n volume =\t {9691},\n pages =\t {57-72},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {For a fixed graph F, we study the parameterized\n complexity of a variant of the F-Free Editing\n problem: Given a graph G and a natural number k, is\n it possible to modify at most k edges in G so that\n the resulting graph contains no induced subgraph\n isomorphic to F? In our variant, the input\n additionally contains a vertex-disjoint packing H of\n induced subgraphs of G, which provides a lower bound\n h(H) on the number of edge modifications required to\n transform G into an F-free graph. While earlier\n works used the number k as parameter or structural\n parameters of the input graph G, we consider the\n parameter l:=k-h(H) instead, that is, the number of\n edge modifications above the lower bound h(H). We\n show fixed-parameter tractability with respect to l\n for K_3-Free Editing, Feedback Arc Set in\n Tournaments, and Cluster Editing when the packing H\n contains subgraphs with bounded modification\n cost. For K3-Free Editing, we also prove NP-hardness\n in case of edge-disjoint packings of K3s and l=0,\n while for Kq-Free Editing and q≥6, NP-hardness for\n l=0 even holds for vertex-disjoint packings of Kq.},\n keywords =\t {graph modification, parameterized complexity},\n}\n\n\n
@incollection{BP16,\n title =\t {Completing partial schedules for Open Shop with unit\n processing times and routing},\n doi =\t\t {10.1007/978-3-319-34171-2_6},\n author =\t {René van Bevern and Artem V. Pyatkin},\n url_Slides =\t {http://rvb.su/pdf/routing-open-shop-csr16.pdf},\n isbn =\t {978-3-319-34170-5},\n year =\t {2016},\n date =\t {2016-05-31},\n booktitle =\t {CSR 2016},\n editor =\t {Alexander S. Kulikov and Gerhard J. Woeginger},\n volume =\t {9691},\n pages =\t {73-87},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {Open Shop is a classical scheduling problem: given a\n set J of jobs and a set M of machines, find a\n minimum-makespan schedule to process each job J_i∈J\n on each machine M_q∈M for a given amount p_iq of\n time such that each machine processes only one job\n at a time and each job is processed by only one\n machine at a time. In Routing Open Shop, the jobs\n are located in the vertices of an edge-weighted\n graph G=(V,E) whose edge weights determine the time\n needed for the machines to travel between\n jobs. Routing Open Shop is NP-hard for\n |V|=|M|=2. For the special case of unit processing\n times p_iq=1, we exploit Galvin's theorem about\n list-coloring edges of bipartite graphs to prove a\n theorem that gives a sufficient condition for the\n completability of partial schedules. Exploiting this\n schedule completion theorem and integer linear\n programming, we show that Routing Open Shop with\n unit processing times is solvable in 2^O(|V||M|^2\n log |V||M|) poly(|J|)) time, that is,\n fixed-parameter tractable parameterized by\n |V|+|M|. Various upper bounds shown using the\n schedule completion theorem suggest it to be\n likewise beneficial for the development of\n approximation algorithms.},\n keywords =\t {NP-hard, parameterized complexity, routing,\n scheduling}\n}\n\n\n\n
@article{FBNS16,\n title =\t {Exploiting hidden structure in selecting dimensions\n that distinguish vectors},\n author =\t {Vincent Froese and René van Bevern and Rolf\n Niedermeier and Manuel Sorge},\n url_Preprint = {http://arxiv.org/abs/1512.01150},\n doi =\t\t {10.1016/j.jcss.2015.11.011},\n issn =\t {0022-0000},\n year =\t 2016,\n date =\t {2016-05-01},\n journal =\t {Journal of Computer and System Sciences},\n volume =\t 82,\n number =\t 3,\n pages =\t {521--535},\n abstract =\t {The NP-hard Distinct Vectors problem asks to delete\n as many columns as possible from a matrix such that\n all rows in the resulting matrix are still pairwise\n distinct. Our main result is that, for binary\n matrices, there is a complexity dichotomy for\n Distinct Vectors based on the maximum (H) and the\n minimum (h) pairwise Hamming distance between matrix\n rows: Distinct Vectors can be solved in polynomial\n time if H≤2⌈h/2⌉+1, and is NP-complete\n otherwise. Moreover, we explore connections of\n Distinct Vectors to hitting sets, thereby providing\n several fixed-parameter tractability and\n intractability results also for general matrices.}\n}\n\n\n
@article{BCH+15,\n title =\t {Approximability and parameterized complexity of\n multicover by $c$-intervals},\n author =\t {René van Bevern and Jiehua Chen and Falk Hüffner and\n Stefan Kratsch and Nimrod Talmon and Gerhard\n J. Woeginger},\n url_Preprint =\n {https://fpt.akt.tu-berlin.de/publications/c-interval-cover-IPL.pdf},\n doi =\t\t {10.1016/j.ipl.2015.03.004},\n issn =\t {0020-0190},\n year =\t 2015,\n date =\t {2015-10-01},\n journal =\t {Information Processing Letters},\n volume =\t 115,\n number =\t 10,\n pages =\t {744--749},\n abstract =\t {A c-interval is the disjoint union of c intervals over\n N. The c-Interval Multicover problem is the special\n case of Set Multicover where all sets available for\n covering are c-intervals. We strengthen known\n APX-hardness results for c-Interval Multicover, show\n W[1]-hardness when parameterized by the solution size,\n and present fixed-parameter algorithms for alternative\n parameterizations.},\n keywords =\t {scheduling}\n}\n\n\n
@article{BMNW15,\n title =\t {Interval scheduling and colorful independent sets},\n author =\t {René van Bevern and Matthias Mnich and Rolf\n Niedermeier and Mathias Weller},\n url_Preprint = {http://arxiv.org/abs/1402.0851},\n doi =\t\t {10.1007/s10951-014-0398-5},\n url_Code =\t {http://fpt.akt.tu-berlin.de/cis},\n issn =\t {1094-6136},\n year =\t 2015,\n date =\t {2015-10-01},\n journal =\t {Journal of Scheduling},\n volume =\t 18,\n number =\t 5,\n pages =\t {449-469},\n publisher =\t {Springer},\n abstract =\t {Numerous applications in scheduling, such as\n resource allocation or steel manufacturing, can be\n modeled using the NP-hard Independent Set problem\n (given an undirected graph and an integer k, find a\n set of at least k pairwise non-adjacent\n vertices). Here, one encounters special graph\n classes like 2-union graphs (edge-wise unions of two\n interval graphs) and strip graphs (edge-wise unions\n of an interval graph and a cluster graph), on which\n Independent Set remains NP-hard but admits constant\n ratio approximations in polynomial time. We study\n the parameterized complexity of Independent Set on\n 2-union graphs and on subclasses like strip\n graphs. Our investigations significantly benefit\n from a new structural “compactness” parameter of\n interval graphs and novel problem formulations using\n vertex-colored interval graphs. Our main\n contributions are as follows: 1. We show a\n complexity dichotomy: restricted to graph classes\n closed under induced subgraphs and disjoint unions,\n Independent Set is polynomial-time solvable if both\n input interval graphs are cluster graphs, and is\n NP-hard otherwise. 2. We chart the possibilities and\n limits of effective polynomial-time preprocessing\n (also known as kernelization). 3. 
We extend\n Halldórsson and Karlsson (2006)’s fixed-parameter\n algorithm for Independent Set on strip graphs\n parameterized by the structural parameter “maximum\n number of live jobs” to show that the problem (also\n known as Job Interval Selection) is fixed-parameter\n tractable with respect to the parameter k and\n generalize their algorithm from strip graphs to\n 2-union graphs. Preliminary experiments with random\n data indicate that Job Interval Selection with up to\n 15 jobs and 5⋅10^5 intervals can be solved optimally\n in less than 5 min.},\n keywords =\t {scheduling}\n}\n\n\n
@incollection{BKS15,\n title =\t {Approximation Algorithms for Mixed, Windy, and\n Capacitated Arc Routing Problems},\n doi =\t\t {10.4230/OASIcs.ATMOS.2015.130},\n author =\t {René van Bevern and Christian Komusiewicz and Manuel\n Sorge},\n url_Slides =\n {http://rvb.su/pdf/approximation-directed-arc-routing-atmos15.pdf},\n url_Code =\t {https://gitlab.com/rvb/mwcarp-approx},\n issn =\t {2190-6807},\n year =\t {2015},\n date =\t {2015-09-17},\n booktitle =\t {ATMOS 2015},\n editor =\t {Giuseppe F. Italiano and Marie Schmidt},\n volume =\t {48},\n pages =\t {130--143},\n publisher =\t {Schloss Dagstuhl--Leibniz-Zentrum fuer Informatik},\n series =\t {OpenAccess Series in Informatics (OASIcs)},\n abstract =\t {We show that any alpha(n)-approximation algorithm\n for the n-vertex metric asymmetric Traveling\n Salesperson problem yields O(alpha(C))-approximation\n algorithms for various mixed, windy, and capacitated\n arc routing problems. Herein, C is the number of\n weakly-connected components in the subgraph induced\n by the positive-demand arcs, a number that can be\n expected to be small in applications. In conjunction\n with known results, we derive constant-factor\n approximations if C is in O(log n) and\n O(log(C)/log(log(C)))-approximations in general.},\n keywords =\t {routing},\n}\n\n\n
@article{BFSS15,\n title =\t {On the Parameterized Complexity of Computing\n Balanced Partitions in Graphs},\n author =\t {René van Bevern and Andreas Emil Feldmann and Manuel\n Sorge and Ondřej Suchý},\n url_Preprint = {http://arxiv.org/abs/1312.7014},\n doi =\t\t {10.1007/s00224-014-9557-5},\n url_Slides =\t {http://rvb.su/pdf/bipartition.pdf},\n issn =\t {1432-4350},\n year =\t 2015,\n date =\t {2015-07-08},\n journal =\t {Theory of Computing Systems},\n volume =\t 57,\n number =\t 1,\n pages =\t {1-35},\n publisher =\t {Springer},\n abstract =\t {A balanced partition is a clustering of a graph into\n a given number of equal-sized parts. For instance,\n the Bisection problem asks to remove at most k edges\n in order to partition the vertices into two\n equal-sized parts. We prove that Bisection is FPT\n for the distance to constant cliquewidth if we are\n given the deletion set. This implies FPT algorithms\n for some well-studied parameters such as cluster\n vertex deletion number and feedback vertex\n set. However, we show that Bisection does not admit\n polynomial-size kernels for these parameters. For\n the Vertex Bisection problem, vertices need to be\n removed in order to obtain two equal-sized parts. We\n show that this problem is FPT for the number of\n removed vertices k if the solution cuts the graph\n into a constant number c of connected\n components. The latter condition is unavoidable,\n since we also prove that Vertex Bisection is\n W[1]-hard w.r.t. (k,c). Our algorithms for finding\n bisections can easily be adapted to finding\n partitions into d equal-sized parts, which entails\n additional running time factors of n^O(d). We show\n that a substantial speed-up is unlikely since the\n corresponding task is W[1]-hard w.r.t. d, even on\n forests of maximum degree two. We can, however, show\n that it is FPT for the vertex cover number.},\n keywords =\t {graph modification}\n}\n\n\n\n
@incollection{BKN+15,\n title =\t {H-Index Manipulation by Merging Articles: Models,\n Theory, and Experiments},\n url =\t\t {http://ijcai.org/Abstract/15/119},\n author =\t {René van Bevern and Christian Komusiewicz and Rolf\n Niedermeier and Manuel Sorge and Toby Walsh},\n isbn =\t {978-1-57735-738-4},\n year =\t {2015},\n date =\t {2015-07-01},\n editor =\t {Qiang Yang and Michael J. Wooldridge},\n booktitle =\t {IJCAI 2015},\n pages =\t {808--814},\n publisher =\t {AAAI Press},\n abstract =\t {An author's profile on Google Scholar consists of\n indexed articles and associated data, such as the\n number of citations and the H-index. The author is\n allowed to merge articles, which may affect the\n H-index. We analyze the parameterized complexity of\n maximizing the H-index using article merges. Herein,\n to model realistic manipulation scenarios, we define\n a compatibility graph whose edges correspond to\n plausible merges. Moreover, we consider multiple\n possible measures for computing the citation count\n of a merged article. For the measure used by Google\n Scholar, we give an algorithm that maximizes the\n H-index in linear time if the compatibility graph\n has constant-size connected components. In contrast,\n if we allow to merge arbitrary articles, then\n already increasing the H-index by one is\n NP-hard. Experiments on Google Scholar profiles of\n AI researchers show that the H-index can be\n manipulated substantially only by merging articles\n with highly dissimilar titles, which would be easy\n to discover.},\n keywords =\t {network analysis},\n}\n\n\n
@article{BBC+15,\n title =\t {Network-Based Vertex Dissolution},\n author =\t {René van Bevern and Robert Bredereck and Jiehua Chen\n and Vincent Froese and Rolf Niedermeier and Gerhard\n J. Woeginger},\n url_Preprint = {http://arxiv.org/abs/1402.2664},\n doi =\t\t {10.1137/140978880},\n year =\t 2015,\n date =\t {2015-05-01},\n journal =\t {SIAM Journal on Discrete Mathematics},\n volume =\t 29,\n number =\t 2,\n pages =\t {888-914},\n abstract =\t {We introduce a graph-theoretic vertex dissolution\n model that applies to a number of redistribution\n scenarios, such as gerrymandering in political\n districting or work balancing in an online\n situation. The central aspect of our model is the\n deletion of certain vertices and the redistribution\n of their load to neighboring vertices in a\n completely balanced way. We investigate how the\n underlying graph structure, the knowledge of which\n vertices should be deleted, and the relation between\n old and new vertex loads influence the computational\n complexity of the underlying graph problems. Our\n results establish a clear borderline between\n tractable and intractable cases.},\n keywords =\t {network analysis}\n}\n\n\n
@article{BDF+15,\n title =\t {Myhill-Nerode Methods for Hypergraphs},\n author =\t {René van Bevern and Rodney G. Downey and Michael\n R. Fellows and Serge Gaspers and Frances\n A. Rosamond},\n url_Preprint = {http://arxiv.org/abs/1211.1299},\n doi =\t\t {10.1007/s00453-015-9977-x},\n url_Slides =\n {http://rvb.su/pdf/myhill-nerode-hypergraphs-isaac13.pdf},\n issn =\t {0178-4617},\n year =\t 2015,\n date =\t {2015-02-27},\n journal =\t {Algorithmica},\n volume =\t 73,\n number =\t 4,\n pages =\t {696-729},\n publisher =\t {Springer},\n abstract =\t {We give an analog of the Myhill–Nerode theorem from\n formal language theory for hypergraphs and use it to\n derive the following results for two NP-hard\n hypergraph problems. (1) We provide an algorithm for\n testing whether a hypergraph has cutwidth at most k\n that runs in linear time for constant k. In terms of\n parameterized complexity theory, the problem is\n fixed-parameter linear parameterized by k. (2) We\n show that it is not expressible in monadic\n second-order logic whether a hypergraph has bounded\n (fractional, generalized) hypertree width. The proof\n leads us to conjecture that, in terms of\n parameterized complexity theory, these problems are\n W[1]-hard parameterized by the incidence treewidth\n (the treewidth of the incidence graph). Thus, in the\n form of the Myhill–Nerode theorem for hypergraphs,\n we obtain a method to derive linear-time algorithms\n and to obtain indicators for intractability for\n hypergraph problems parameterized by incidence\n treewidth.},\n keywords =\t {hypergraphs, invited}\n}\n\n\n
@InCollection{BNSW15,\n author =\t {René van Bevern and Rolf Niedermeier and Manuel\n Sorge and Mathias Weller},\n booktitle =\t {Arc Routing},\n publisher =\t {SIAM},\n title =\t {Complexity of arc routing problems},\n year =\t {2015},\n chapter =\t {2},\n editor =\t {{\\'{A}}ngel Corberán and Gilbert Laporte},\n isbn =\t {978-1-61197-366-2},\n month =\t feb,\n pages =\t {19--52},\n series =\t {MOS-SIAM Series on Optimization},\n type =\t {chapter},\n volume =\t {20},\n doi =\t\t {10.1137/1.9781611973679.ch2},\n file =\t {BNSW14.pdf:BNSW14.pdf:PDF},\n groups =\t {hs-survey grant},\n keywords =\t {routing},\n timestamp =\t {2020-05-19},\n}\n\n\n\n\n
@article{Bev14b,\n title =\t {Towards Optimal and Expressive Kernelization for\n $d$-Hitting Set},\n author =\t {René van Bevern},\n url_Preprint = {http://arxiv.org/abs/1112.2310v3},\n doi =\t\t {10.1007/s00453-013-9774-3},\n url_Slides =\n {http://rvb.su/pdf/towards-optimal-and-expressive-kernelization-for-hitting-set-63tt.pdf},\n url_Code =\t {http://fpt.akt.tu-berlin.de/hslinkern/},\n issn =\t {0178-4617},\n year =\t 2014,\n date =\t {2014-09-01},\n journal =\t {Algorithmica},\n volume =\t 70,\n number =\t 1,\n pages =\t {129-147},\n publisher =\t {Springer},\n abstract =\t {A sunflower in a hypergraph is a set of hyperedges\n pairwise intersecting in exactly the same vertex\n set. Sunflowers are a useful tool in polynomial-time\n data reduction for problems formalizable as\n d-Hitting Set, the problem of covering all\n hyperedges (whose cardinality is bounded from above\n by a constant d) of a hypergraph by at most k\n vertices. Additionally, in fault diagnosis,\n sunflowers yield concise explanations for “highly\n defective structures”. We provide a linear-time\n algorithm that, by finding sunflowers, transforms an\n instance of d-Hitting Set into an equivalent\n instance comprising at most O(k^d) hyperedges and\n vertices. In terms of parameterized complexity, we\n show a problem kernel with asymptotically optimal\n size (unless coNP⊆NP/poly) and provide experimental\n results that show the practical applicability of our\n algorithm. Finally, we show that the number of\n vertices can be reduced to O(k^(d−1)) with\n additional processing in O(k^(1.5d))\n time—nontrivially combining the sunflower technique\n with problem kernels due to Abu-Khzam and Moser.},\n keywords =\t {hypergraphs, invited}\n}\n\n\n
@incollection{BBC+14b,\n title =\t {Network-Based Dissolution},\n doi =\t\t {10.1007/978-3-662-44465-8_7},\n author =\t {René van Bevern and Robert Bredereck and Jiehua Chen\n and Vincent Froese and Rolf Niedermeier and Gerhard\n J. Woeginger},\n isbn =\t {978-3-662-44464-1},\n year =\t {2014},\n date =\t {2014-08-25},\n booktitle =\t {MFCS 2014},\n editor =\t {Erzsébet Csuhaj-Varjú and Martin Dietzfelbinger and Zoltán Ésik},\n volume =\t {8635},\n pages =\t {69-80},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {We introduce a graph-theoretic dissolution model\n that applies to a number of redistribution scenarios\n such as gerrymandering in political districting or\n work balancing in an online situation. The central\n aspect of our model is the deletion of certain\n vertices and the redistribution of their loads to\n neighboring vertices in a perfectly balanced way. We\n investigate how the underlying graph structure, the\n pre-knowledge of which vertices should be deleted,\n and the relation between old and new vertex loads\n influence the computational complexity of the\n underlying graph problems. Our results establish a\n clear borderline between tractable and intractable\n cases.},\n keywords =\t {network analysis}\n}\n\n\n
@incollection{BBB+14,\n title =\t {Star Partitions of Perfect Graphs},\n doi =\t\t {10.1007/978-3-662-43948-7_15},\n author =\t {René van Bevern and Robert Bredereck and Laurent\n Bulteau and Jiehua Chen and Vincent Froese and Rolf\n Niedermeier and Gerhard J. Woeginger},\n isbn =\t {978-3-662-43947-0},\n year =\t {2014},\n date =\t {2014-07-01},\n booktitle =\t {ICALP 2014},\n editor =\t {Javier Esparza and Pierre Fraigniaud and Thore Husfeldt and Elias Koutsoupias},\n volume =\t {8572},\n pages =\t {174-185},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {The partition of graphs into nice subgraphs is a\n central algorithmic problem with strong ties to\n matching theory. We study the partitioning of\n undirected graphs into stars, a problem known to be\n NP-complete even for the case of stars on three\n vertices. We perform a thorough computational\n complexity study of the problem on subclasses of\n perfect graphs and identify several polynomial-time\n solvable cases, for example, on interval graphs and\n bipartite permutation graphs, and also NP-hard\n cases, for example, on grid graphs and chordal\n graphs.},\n keywords =\t {algorithmic graph theory}\n}\n\n\n
@article{BHNS14,\n title =\t {Constant-factor approximations for Capacitated Arc\n Routing without triangle inequality},\n author =\t {René van Bevern and Sepp Hartung and André\n Nichterlein and Manuel Sorge},\n url_Preprint = {http://arxiv.org/abs/1404.3660},\n doi =\t\t {10.1016/j.orl.2014.05.002},\n issn =\t {0167-6377},\n year =\t 2014,\n date =\t {2014-06-01},\n journal =\t {Operations Research Letters},\n volume =\t 42,\n number =\t 4,\n pages =\t {290-292},\n abstract =\t {Given an undirected graph with edge costs\n and edge demands, the Capacitated Arc Routing\n problem (CARP) asks for minimum-cost routes for\n equal-capacity vehicles so as to satisfy all\n demands. Constant-factor polynomial-time\n approximation algorithms were proposed for CARP with\n triangle inequality, while CARP was claimed to be\n NP-hard to approximate within any constant factor in\n general. Correcting this claim, we show that any\n factor α approximation for CARP with triangle\n inequality yields a factor α approximation for the\n general CARP.},\n keywords =\t {routing, scheduling}\n}\n\n\n
@incollection{BFGR13,\n title =\t {Myhill-Nerode Methods for Hypergraphs},\n doi =\t\t {10.1007/978-3-642-45030-3_35},\n author =\t {René van Bevern and Michael R. Fellows and Serge\n Gaspers and Frances A. Rosamond},\n url_Slides =\n {http://rvb.su/pdf/myhill-nerode-hypergraphs-isaac13.pdf},\n isbn =\t {978-3-642-45029-7},\n year =\t {2013},\n date =\t {2013-12-13},\n booktitle =\t {ISAAC 2013},\n editor =\t {Leizhen Cai and Siu-Wing Cheng and Tak-Wah Lam},\n volume =\t {8283},\n pages =\t {372-382},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {We introduce a method of applying Myhill-Nerode\n methods from formal language theory to hypergraphs\n and show how this method can be used to obtain the\n following parameterized complexity\n results. Hypergraph Cutwidth (deciding whether a\n hypergraph on n vertices has cutwidth at most k) is\n linear-time solvable for constant k. For hypergraphs\n of constant incidence treewidth (treewidth of the\n incidence graph), Hypertree Width and variants\n cannot be solved by simple finite tree automata. The\n proof leads us to conjecture that Hypertree Width is\n W[1]-hard for this parameter.},\n keywords =\t {hypergraphs}\n}\n\n\n
@incollection{FBNS13,\n title =\t {A Parameterized Complexity Analysis of Combinatorial\n Feature Selection Problems},\n doi =\t\t {10.1007/978-3-642-40313-2_40},\n author =\t {Vincent Froese and René van Bevern and Rolf\n Niedermeier and Manuel Sorge},\n isbn =\t {978-3-642-40312-5},\n year =\t {2013},\n date =\t {2013-08-26},\n booktitle =\t {MFCS 2013},\n editor =\t {Krishnendu Chatterjee and Jiří Sgall},\n volume =\t {8087},\n pages =\t {445-456},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {We examine the algorithmic tractability of NP-hard\n combinatorial feature selection problems in terms of\n parameterized complexity theory. In combinatorial\n feature selection, one seeks to discard dimensions\n from high-dimensional data such that the resulting\n instances fulfill a desired property. In\n parameterized complexity analysis, one seeks to\n identify relevant problem-specific quantities and\n tries to determine their influence on the\n computational complexity of the considered\n problem. In this paper, for various combinatorial\n feature selection problems, we identify\n parameterizations and reveal to what extent these\n govern computational complexity. We provide\n tractability as well as intractability results; for\n example, we show that the Distinct Vectors problem\n on binary points is polynomial-time solvable if each\n pair of points differs in at most three dimensions,\n whereas it is NP-hard otherwise.},\n}\n\n\n
@incollection{BFSS13,\n title =\t {On the Parameterized Complexity of Computing Graph\n Bisections},\n doi =\t\t {10.1007/978-3-642-45043-3_8},\n author =\t {René van Bevern and Andreas Emil Feldmann and Manuel\n Sorge and Ondřej Suchý},\n isbn =\t {978-3-642-45042-6},\n year =\t {2013},\n date =\t {2013-06-19},\n booktitle =\t {WG 2013},\n editor =\t {Andreas Brandstädt and Klaus Jansen and Rüdiger Reischuk},\n volume =\t {8165},\n pages =\t {76-87},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {The Bisection problem asks for a partition of the\n vertices of a graph into two equally sized sets,\n while minimizing the cut size. This is the number of\n edges connecting the two vertex sets. Bisection has\n been thoroughly studied in the past. However, only\n few results have been published that consider the\n parameterized complexity of this problem. We show\n that Bisection is FPT w.r.t. the minimum cut size if\n there is an optimum bisection that cuts into a given\n constant number of connected components. Our\n algorithm applies to the more general Balanced\n Biseparator problem where vertices need to be\n removed instead of edges. We prove that this problem\n is W[1]-hard w.r.t. the minimum cut size and the\n number of cut out components. For Bisection we\n further show that no polynomial-size kernels exist\n for the cut size parameter. In fact, we show this\n for all parameters that are polynomial in the input\n size and that do not increase when taking disjoint\n unions of graphs. We prove fixed-parameter\n tractability for the distance to constant\n cliquewidth if we are given the deletion set. This\n implies fixed-parameter algorithms for some\n well-studied parameters such as cluster vertex\n deletion number and feedback vertex set.},\n url_Slides =\t {http://rvb.su/pdf/bipartition.pdf},\n keywords =\t {graph modification}\n}\n\n\n
@incollection{BBC+13,\n title =\t {Parameterized Complexity of {DAG} Partitioning},\n doi =\t\t {10.1007/978-3-642-38233-8_5},\n author =\t {René van Bevern and Robert Bredereck and Morgan\n Chopin and Sepp Hartung and Falk Hüffner and André\n Nichterlein and Ondřej Suchý},\n isbn =\t {978-3-642-38232-1},\n year =\t {2013},\n date =\t {2013-05-22},\n booktitle =\t {CIAC 2013},\n editor =\t {Paul G. Spirakis and Maria Serna},\n volume =\t {7878},\n pages =\t {49-60},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {The goal of tracking the origin of short,\n distinctive phrases (memes) that propagate through\n the web in reaction to current events has been\n formalized as DAG Partitioning: given a directed\n acyclic graph, delete edges of minimum weight such\n that each resulting connected component of the\n underlying undirected graph contains only one\n sink. Motivated by NP-hardness and hardness of\n approximation results, we consider the parameterized\n complexity of this problem. We show that it can be\n solved in O(2^k·n^2) time, where k is the number of\n edge deletions, proving fixed-parameter tractability\n for parameter k. We then show that unless the\n Exponential Time Hypothesis (ETH) fails, this cannot\n be improved to 2^o(k)·n^O(1); further, DAG\n Partitioning does not have a polynomial kernel\n unless NP ⊆ coNP/poly. Finally, given a tree\n decomposition of width w, we show how to solve DAG\n Partitioning in 2^O(w^2)⋅n time, improving a known\n algorithm for the parameter pathwidth.},\n keywords =\t {graph modification, network analysis},\n}\n\n\n
@incollection{BMNW12,\n title =\t {Interval Scheduling and Colorful Independent Sets},\n doi =\t\t {10.1007/978-3-642-35261-4_28},\n author =\t {René van Bevern and Matthias Mnich and Rolf\n Niedermeier and Mathias Weller},\n isbn =\t {978-3-642-35260-7},\n year =\t {2012},\n date =\t {2012-12-19},\n booktitle =\t {ISAAC 2012},\n editor =\t {Kun-Mao Chao and Tsan-sheng Hsu and Der-Tsai Lee},\n volume =\t {7676},\n pages =\t {247--256},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {The NP-hard Independent Set problem is to determine\n for a given graph G and an integer k whether G\n contains a set of k pairwise non-adjacent\n vertices. The problem has numerous applications in\n scheduling, including resource allocation and steel\n manufacturing. There, one encounters restricted\n graph classes such as 2-union graphs, which are\n edge-wise unions of two interval graphs on the same\n vertex set, or strip graphs, where additionally one\n of the two interval graphs is a disjoint union of\n cliques. We prove NP-hardness of Independent Set on\n a very restricted subclass of 2-union graphs and\n identify natural parameterizations to chart the\n possibilities and limitations of effective\n polynomial-time preprocessing (kernelization) and\n fixed-parameter algorithms. Our algorithms benefit\n from novel formulations of the computational\n problems in terms of (list-)colored interval\n graphs.},\n keywords =\t {scheduling}\n}\n\n\n
@article{SBNW12,\n title =\t {A new view on Rural Postman based on Eulerian\n Extension and Matching},\n author =\t {Manuel Sorge and René van Bevern and Rolf Niedermeier\n and Mathias Weller},\n url_Preprint =\n {http://fpt.akt.tu-berlin.de/publications/A_New_View_on_Rural_Postman_Based_on_Eulerian_Extension_and_Matching-JDA.pdf},\n doi =\t\t {10.1016/j.jda.2012.04.007},\n issn =\t {1570-8667},\n year =\t 2012,\n date =\t {2012-10-01},\n journal =\t {Journal of Discrete Algorithms},\n volume =\t 16,\n pages =\t {12--33},\n abstract =\t {We provide a new characterization of the NP-hard arc\n routing problem Rural Postman in terms of a\n constrained variant of minimum-weight perfect matching\n on bipartite graphs. To this end, we employ a\n parameterized equivalence between Rural Postman and\n Eulerian Extension, a natural arc addition problem in\n directed multigraphs. We indicate the NP-hardness of\n the introduced matching problem. In particular, we use\n the matching problem to make partial progress towards\n answering the open question about the parameterized\n complexity of Rural Postman with respect to the\n parameter “number of weakly connected components in\n the graph induced by the required arcs”. This is a\n more than thirty years open and long-neglected\n question with significant practical relevance.},\n keywords =\t {graph modification, invited, routing}\n}\n\n\n
@incollection{BHK+11,\n title =\t {Linear-Time Computation of a Linear Problem Kernel\n for Dominating Set on Planar Graphs},\n author =\t {René van Bevern and Sepp Hartung and Frank Kammer\n and Rolf Niedermeier and Mathias Weller},\n url_Preprint =\n {http://fpt.akt.tu-berlin.de/publications/Linear-Time_Computation_of_a_Linear_Problem_Kernel_for_Dominating_set_on_Planar_Graphs-IPEC.pdf},\n url_Slides =\n {http://rvb.su/pdf/linear-time-linear-kernel-dominating-set.pdf},\n doi =\t\t {10.1007/978-3-642-28050-4_16},\n isbn =\t {978-3-642-28049-8},\n year =\t {2012},\n date =\t {2012-09-06},\n booktitle =\t {IPEC 2011},\n editor =\t {Dániel Marx and Peter Rossmanith},\n volume =\t {7112},\n pages =\t {194--206},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {We present a linear-time kernelization algorithm\n that transforms a given planar graph G with\n domination number γ(G) into a planar graph G' of\n size O(γ(G)) with γ(G) = γ(G'). In addition, a\n minimum dominating set for G can be inferred from a\n minimum dominating set for G'. In terms of\n parameterized algorithmics, this implies a\n linear-size problem kernel for the NP-hard\n Dominating Set problem on planar graphs, where the\n kernelization takes linear time. This improves on\n previous kernelization algorithms that provide\n linear-size kernels in cubic time.},\n keywords =\t {algorithmic graph theory}\n}\n\n\n
@incollection{Bev12,\n title =\t {Towards Optimal and Expressive Kernelization for\n $d$-Hitting Set},\n doi =\t\t {10.1007/978-3-642-32241-9_11},\n author =\t {René van Bevern},\n url_Slides =\n {http://rvb.su/pdf/towards-optimal-and-expressive-kernelization-for-hitting-set-63tt.pdf},\n isbn =\t {978-3-642-32240-2},\n year =\t {2012},\n date =\t {2012-08-20},\n booktitle =\t {COCOON 2012},\n editor =\t {Joachim Gudmundsson and Julián Mestre and Taso Viglas},\n volume =\t {7434},\n pages =\t {121-132},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {A sunflower in a hypergraph is a set of hyperedges\n pairwise intersecting in exactly the same vertex\n set. Sunflowers are a useful tool in polynomial-time\n data reduction for problems formalizable as\n d-Hitting Set, the problem of covering all\n hyperedges (of cardinality at most d) of a\n hypergraph by at most k vertices. Additionally, in\n fault diagnosis, sunflowers yield concise\n explanations for “highly defective structures”. We\n provide a linear-time algorithm that, by finding\n sunflowers, transforms an instance of d-Hitting Set\n into an equivalent instance comprising at most\n O(k^d) hyperedges and vertices. In terms of\n parameterized complexity, we show a problem kernel\n with asymptotically optimal size (unless\n coNP ⊆ NP/poly). We show that the number of vertices\n can be reduced to O(k^(d−1)) with additional\n processing in O(k^(1.5d)) time—nontrivially\n combining the sunflower technique with problem\n kernels due to Abu-Khzam and Moser.},\n keywords =\t {hypergraphs}\n}\n\n\n
@article{BMN12,\n title =\t {Approximation and Tidying---A Problem Kernel for\n $s$-Plex Cluster Vertex Deletion},\n author =\t {René van Bevern and Hannes Moser and Rolf Niedermeier},\n url_Preprint =\n {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/approximation-and-tidying.pdf},\n url_Slides =\t {http://rvb.su/pdf/kernelization-through-tidying-talk-latin10.pdf},\n doi =\t\t {10.1007/s00453-011-9492-7},\n issn =\t {0178-4617},\n year =\t 2012,\n date =\t {2012-04-01},\n journal =\t {Algorithmica},\n volume =\t 62,\n number =\t {3-4},\n pages =\t {930-950},\n publisher =\t {Springer},\n abstract =\t {We introduce the NP-hard graph-based data clustering\n problem s-Plex Cluster Vertex Deletion, where the task\n is to delete at most k vertices from a graph so that\n the connected components of the resulting graph are\n s-plexes. In an s-plex, every vertex has an edge to\n all but at most s−1 other vertices; cliques are\n 1-plexes. We propose a new method based on\n “approximation and tidying” for kernelizing vertex\n deletion problems whose goal graphs can be\n characterized by forbidden induced subgraphs. The\n method exploits polynomial-time approximation results\n and thus provides a useful link between approximation\n and kernelization. Employing “approximation and\n tidying”, we develop data reduction rules that, in\n O(ksn^2) time, transform an s-Plex Cluster Vertex\n Deletion instance with n vertices into an equivalent\n instance with O(k^2 s^3) vertices, yielding a problem\n kernel. To this end, we also show how to exploit\n structural properties of the specific problem in order\n to significantly improve the running time of the\n proposed kernelization method.},\n keywords =\t {graph modification}\n}\n\n\n
@article{BBF+11,\n title =\t {Parameterized Algorithmics for Finding Connected\n Motifs in Biological Networks},\n author =\t {Nadja Betzler and René van Bevern and Michael\n R. Fellows and Christian Komusiewicz and Rolf\n Niedermeier},\n url_Preprint =\n {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/finding-connected-motifs-in-biological-networks.pdf},\n url_Code =\t {http://fpt.akt.tu-berlin.de/graph-motif/},\n doi =\t\t {10.1109/TCBB.2011.19},\n issn =\t {1545-5963},\n year =\t 2011,\n date =\t {2011-09-01},\n journal =\t {IEEE/ACM Transactions on Computational Biology and\n Bioinformatics},\n volume =\t 8,\n number =\t 5,\n pages =\t {1296-1308},\n abstract =\t {We study the NP-hard LIST-COLORED GRAPH MOTIF problem\n which, given an undirected list-colored graph G = (V,\n E) and a multiset M of colors, asks for\n maximum-cardinality sets S ⊆ V and M' ⊆ M such that\n G[S] is connected and contains exactly (with respect\n to multiplicity) the colors in M'. LIST-COLORED GRAPH\n MOTIF has applications in the analysis of biological\n networks. We study LIST-COLORED GRAPH MOTIF with\n respect to three different parameterizations. For the\n parameters motif size |M| and solution size |S|, we\n present fixed-parameter algorithms, whereas for the\n parameter |V| - |M|, we show W[1]-hardness for general\n instances and achieve fixed-parameter tractability for\n a special case of LIST-COLORED GRAPH MOTIF. We\n implemented the fixed-parameter algorithms for\n parameters |M| and |S|, developed further speed-up\n heuristics for these algorithms, and applied them in\n the context of querying protein-interaction networks,\n demonstrating their usefulness for realistic\n instances. Furthermore, we show that extending the\n request for motif connectedness to stronger demands,\n such as biconnectedness or bridge-connectedness leads\n to W[1]-hard problems when the parameter is the motif\n size |M|.},\n keywords =\t {network analysis}\n}\n\n\n
@incollection{SBNW11,\n title =\t {From Few Components to an Eulerian Graph by Adding\n Arcs},\n author =\t {Manuel Sorge and René van Bevern and Rolf\n Niedermeier and Mathias Weller},\n url_Preprint =\n {http://fpt.akt.tu-berlin.de/publications/From_Few_Components_to_an_Eulerian_Graph_by_Adding_Arcs-WG.pdf},\n doi =\t\t {10.1007/978-3-642-25870-1_28},\n isbn =\t {978-3-642-25869-5},\n year =\t {2011},\n date =\t {2011-06-21},\n booktitle =\t {WG 2011},\n editor =\t {Petr Kolman and Jan Kratochvíl},\n volume =\t {6986},\n pages =\t {307--318},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {Eulerian Extension (EE) is the problem to make an\n arc-weighted directed multigraph Eulerian by adding\n arcs of minimum total cost. EE is NP-hard and has\n been shown fixed-parameter tractable with respect to\n the number of arc additions. Complementing this\n result, on the way to answering an open question, we\n show that EE is fixed-parameter tractable with\n respect to the combined parameter “number of\n connected components in the underlying undirected\n multigraph” and “sum of indeg(v) - outdeg(v) over\n all vertices v in the input multigraph where this\n value is positive.” Moreover, we show that EE is\n unlikely to admit a polynomial-size problem kernel\n for this parameter combination and for the parameter\n “number of arc additions”.},\n keywords =\t {graph modification, routing}\n}\n\n\n
@incollection{BKMN10,\n title =\t {Measuring Indifference: Unit Interval Vertex\n Deletion},\n author =\t {René van Bevern and Christian Komusiewicz and Hannes\n Moser and Rolf Niedermeier},\n url_Preprint =\n {http://www.akt.tu-berlin.de/fileadmin/fg34/publications-akt/unit-interval-vertex-deletion-wg10.pdf},\n doi =\t\t {10.1007/978-3-642-16926-7_22},\n url_Slides =\n {http://rvb.su/pdf/unit-interval-vertex-deletion-talk-wg2010.pdf},\n isbn =\t {978-3-642-16925-0},\n year =\t {2010},\n date =\t {2010-06-28},\n booktitle =\t {WG 2010},\n editor =\t {Dimitrios M. Thilikos},\n volume =\t {6410},\n pages =\t {232--243},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {Making a graph unit interval by a minimum number of\n vertex deletions is NP-hard. The problem is\n motivated by applications in seriation and measuring\n indifference between data items. We present a\n fixed-parameter algorithm based on the iterative\n compression technique that finds in\n O((14k+14)^(k+1)kn^6) time a set of k vertices whose\n deletion from an n-vertex graph makes it unit\n interval. Additionally, we show that making a graph\n chordal by at most k vertex deletions is NP-complete\n even on claw,net,tent-free graphs.},\n keywords =\t {algorithmic graph theory, graph modification}\n}\n\n\n
@incollection{BMN10,\n title =\t {Kernelization through Tidying---A Case Study Based\n on $s$-Plex Cluster Vertex Deletion},\n author =\t {René van Bevern and Hannes Moser and Rolf\n Niedermeier},\n isbn =\t {978-3-642-12199-9},\n url_Slides =\n {http://rvb.su/pdf/kernelization-through-tidying-talk-latin10.pdf},\n year =\t {2010},\n date =\t {2010-04-19},\n booktitle =\t {LATIN 2010},\n editor =\t {Alejandro López-Ortiz},\n volume =\t {6034},\n pages =\t {527-538},\n publisher =\t {Springer},\n series =\t {Lecture Notes in Computer Science},\n abstract =\t {We introduce the NP-hard graph-based data clustering\n problem s-Plex Cluster Vertex Deletion, where the\n task is to delete at most k vertices from a graph so\n that the connected components of the resulting graph\n are s-plexes. In an s-plex, every vertex has an edge\n to all but at most s−1 other vertices; cliques are\n 1-plexes. We propose a new method for kernelizing a\n large class of vertex deletion problems and\n illustrate it by developing an O(k^2 s^3)-vertex\n problem kernel for s-Plex Cluster Vertex Deletion\n that can be computed in O(ksn^2) time, where n is\n the number of graph vertices. The corresponding\n “kernelization through tidying” exploits\n polynomial-time approximation results.},\n doi =\t\t {10.1007/978-3-642-12200-2_46},\n keywords =\t {graph modification}\n}\n\n\n\n