DTAM: Dense Tracking and Mapping in Real-Time. Newcombe, R., A., Lovegrove, S., J., & Davison, A., J. In IEEE International Conference on Computer Vision, ICCV 2011, Barcelona, Spain, November 6-13, 2011, pages 2320--2327, 2011. Paper abstract bibtex DTAM is a system for real-time camera tracking and reconstruction which relies not on feature extraction but dense, every pixel methods. As a single hand-held RGB camera flies over a static scene, we estimate detailed textured depth maps at selected keyframes to produce a surface patchwork with millions of vertices. We use the hundreds of images available in a video stream to improve the quality of a simple photometric data term, and minimise a global spatially regularised energy functional in a novel non-convex optimisation framework. Interleaved, we track the camera's 6DOF motion precisely by frame-rate whole image alignment against the entire dense model. Our algorithms are highly parallelisable throughout and DTAM achieves real-time performance using current commodity GPU hardware. We demonstrate that a dense model permits superior tracking performance under rapid motion compared to a state of the art method using features; and also show the additional usefulness of the dense model for real-time scene interaction in a physics-enhanced augmented reality application.
@inproceedings{newcombe2011dtam,
  title               = {{DTAM}: Dense Tracking and Mapping in Real-Time},
  type                = {inproceedings},
  year                = {2011},
  pages               = {2320--2327},
  id                  = {414947f8-cc24-3dcc-bb9e-0c6065da702c},
  created             = {2022-09-08T06:32:14.575Z},
  accessed            = {2022-09-08},
  file_attached       = {true},
  profile_id          = {48fc0258-023d-3602-860e-824092d62c56},
  group_id            = {5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1},
  last_modified       = {2022-09-12T10:25:30.464Z},
  read                = {false},
  starred             = {false},
  authored            = {false},
  confirmed           = {false},
  hidden              = {false},
  folder_uuids        = {97693603-b330-4e3e-8cf5-d549e6474921},
  private_publication = {false},
  abstract            = {DTAM is a system for real-time camera tracking and reconstruction which relies not on feature extraction but dense, every pixel methods. As a single hand-held RGB camera flies over a static scene, we estimate detailed textured depth maps at selected keyframes to produce a surface patchwork with millions of vertices. We use the hundreds of images available in a video stream to improve the quality of a simple photometric data term, and minimise a global spatially regularised energy functional in a novel non-convex optimisation framework. Interleaved, we track the camera's 6DOF motion precisely by frame-rate whole image alignment against the entire dense model. Our algorithms are highly parallelisable throughout and DTAM achieves real-time performance using current commodity GPU hardware. We demonstrate that a dense model permits superior tracking performance under rapid motion compared to a state of the art method using features; and also show the additional usefulness of the dense model for real-time scene interaction in a physics-enhanced augmented reality application.},
  bibtype             = {inproceedings},
  author              = {Newcombe, Richard A and Lovegrove, Steven J and Davison, Andrew J},
  booktitle           = {IEEE International Conference on Computer Vision, ICCV 2011, Barcelona, Spain, November 6-13, 2011}
}
Downloads: 0
{"_id":"bNgjHabcGNmzWHx6q","bibbaseid":"newcombe-lovegrove-davison-dtamdensetrackingandmappinginrealtime-2011","downloads":0,"creationDate":"2018-01-22T16:01:12.982Z","title":"DTAM: Dense Tracking and Mapping in Real-Time","author_short":["Newcombe, R., A.","Lovegrove, S., J.","Davison, A., J."],"year":2011,"bibtype":"inproceedings","biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibdata":{"title":"DTAM: Dense Tracking and Mapping in Real-Time","type":"inproceedings","year":"2011","pages":"2320--2327","id":"414947f8-cc24-3dcc-bb9e-0c6065da702c","created":"2022-09-08T06:32:14.575Z","accessed":"2022-09-08","file_attached":"true","profile_id":"48fc0258-023d-3602-860e-824092d62c56","group_id":"5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1","last_modified":"2022-09-12T10:25:30.464Z","read":false,"starred":false,"authored":false,"confirmed":false,"hidden":false,"folder_uuids":"97693603-b330-4e3e-8cf5-d549e6474921","private_publication":false,"abstract":"DTAM is a system for real-time camera tracking and reconstruction which relies not on feature extraction but dense, every pixel methods. As a single hand-held RGB camera flies over a static scene, we estimate detailed textured depth maps at selected keyframes to produce a surface patchwork with millions of vertices. We use the hundreds of images available in a video stream to improve the quality of a simple photometric data term, and minimise a global spatially regularised energy functional in a novel non-convex opti-misation framework. Interleaved, we track the camera's 6DOF motion precisely by frame-rate whole image alignment against the entire dense model. Our algorithms are highly parallelisable throughout and DTAM achieves real-time performance using current commodity GPU hardware. 
We demonstrate that a dense model permits superior tracking performance under rapid motion compared to a state of the art method using features; and also show the additional usefulness of the dense model for real-time scene interaction in a physics-enhanced augmented reality application.","bibtype":"inproceedings","author":"Newcombe, Richard A and Lovegrove, Steven J and Davison, Andrew J","booktitle":"IEEE International Conference on Computer Vision, ICCV 2011, Barcelona, Spain, November 6-13, 2011","bibtex":"@inproceedings{\n title = {DTAM: Dense Tracking and Mapping in Real-Time},\n type = {inproceedings},\n year = {2011},\n pages = {2320--2327},\n id = {414947f8-cc24-3dcc-bb9e-0c6065da702c},\n created = {2022-09-08T06:32:14.575Z},\n accessed = {2022-09-08},\n file_attached = {true},\n profile_id = {48fc0258-023d-3602-860e-824092d62c56},\n group_id = {5ec9cc91-a5d6-3de5-82f3-3ef3d98a89c1},\n last_modified = {2022-09-12T10:25:30.464Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {false},\n hidden = {false},\n folder_uuids = {97693603-b330-4e3e-8cf5-d549e6474921},\n private_publication = {false},\n abstract = {DTAM is a system for real-time camera tracking and reconstruction which relies not on feature extraction but dense, every pixel methods. As a single hand-held RGB camera flies over a static scene, we estimate detailed textured depth maps at selected keyframes to produce a surface patchwork with millions of vertices. We use the hundreds of images available in a video stream to improve the quality of a simple photometric data term, and minimise a global spatially regularised energy functional in a novel non-convex opti-misation framework. Interleaved, we track the camera's 6DOF motion precisely by frame-rate whole image alignment against the entire dense model. Our algorithms are highly parallelisable throughout and DTAM achieves real-time performance using current commodity GPU hardware. 
We demonstrate that a dense model permits superior tracking performance under rapid motion compared to a state of the art method using features; and also show the additional usefulness of the dense model for real-time scene interaction in a physics-enhanced augmented reality application.},\n bibtype = {inproceedings},\n author = {Newcombe, Richard A and Lovegrove, Steven J and Davison, Andrew J},\n booktitle = {IEEE International Conference on Computer Vision, ICCV 2011, Barcelona, Spain, November 6-13, 2011}\n}","author_short":["Newcombe, R., A.","Lovegrove, S., J.","Davison, A., J."],"urls":{"Paper":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c/file/47b6f2bd-8c2a-dc3b-7a54-5d704760546e/full_text.pdf.pdf"},"biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibbaseid":"newcombe-lovegrove-davison-dtamdensetrackingandmappinginrealtime-2011","role":"author","metadata":{"authorlinks":{}},"downloads":0},"search_terms":["dtam","dense","tracking","mapping","real","time","newcombe","lovegrove","davison"],"keywords":[],"authorIDs":[],"dataSources":["9cexBw6hrwgyZphZZ","ya2CyA73rpZseyrZ8","2252seNhipfTmjEBQ"]}