Real-time simultaneous localisation and mapping with a single camera. Davison, A. J. Proceedings of the IEEE International Conference on Computer Vision, 2:1403-1410, 2003.

Abstract: Ego-motion estimation for an agile single camera moving through general, unknown scenes becomes a much more challenging problem when real-time performance is required rather than under the off-line processing conditions under which most successful structure from motion work has been achieved. This task of estimating camera motion from measurements of a continuously expanding set of self-mapped visual features is one of a class of problems known as Simultaneous Localisation and Mapping (SLAM) in the robotics community, and we argue that such real-time mapping research, despite rarely being camera-based, is more relevant here than off-line structure from motion methods due to the more fundamental emphasis placed on propagation of uncertainty. We present a top-down Bayesian framework for single-camera localisation via mapping of a sparse set of natural features using motion modelling and an information-guided active measurement strategy, in particular addressing the difficult issue of real-time feature initialisation via a factored sampling approach. Real-time handling of uncertainty permits robust localisation via the creation and active measurement of a sparse map of landmarks such that regions can be re-visited after periods of neglect and localisation can continue through periods when few features are visible. Results are presented of real-time localisation for a hand-waved camera with very sparse prior scene knowledge and all processing carried out on a desktop PC.
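The framework the abstract describes propagates uncertainty through a motion model and then measures features "actively", i.e. only where the filter expects to find them. A minimal Python sketch of that idea follows, assuming an extended-Kalman-filter style prediction with a constant-velocity model and a search window derived from the predicted innovation covariance; the state layout, noise values and measurement window are illustrative assumptions, not the paper's implementation.

import numpy as np

def predict_constant_velocity(x, P, dt, accel_noise=1.0):
    """EKF prediction with a constant-velocity motion model.

    x : state vector [px, py, pz, vx, vy, vz] (orientation omitted for brevity)
    P : 6x6 state covariance
    """
    F = np.eye(6)
    F[0:3, 3:6] = dt * np.eye(3)            # position integrates velocity

    # Process noise: unknown accelerations perturb the velocity over dt.
    G = np.vstack([0.5 * dt**2 * np.eye(3), dt * np.eye(3)])
    Q = accel_noise**2 * G @ G.T

    x_pred = F @ x
    P_pred = F @ P @ F.T + Q
    return x_pred, P_pred

def innovation_gate(h_pred, S, n_sigma=3.0):
    """Axis-aligned image window implied by the 2x2 innovation covariance S.

    The feature template is correlated only inside this window rather than
    over the whole image, which is what "active measurement" refers to here.
    """
    half = n_sigma * np.sqrt(np.diag(S))
    return h_pred - half, h_pred + half

if __name__ == "__main__":
    x = np.zeros(6)
    P = np.eye(6) * 0.01
    x, P = predict_constant_velocity(x, P, dt=1.0 / 30.0)
    lo, hi = innovation_gate(np.array([160.0, 120.0]),
                             np.array([[25.0, 0.0], [0.0, 16.0]]))
    print("search window:", lo, hi)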
@inproceedings{davison2003realtime,
title = {Real-time simultaneous localisation and mapping with a single camera},
year = {2003},
pages = {1403--1410},
volume = {2},
abstract = {Ego-motion estimation for an agile single camera moving through general, unknown scenes becomes a much more challenging problem when real-time performance is required rather than under the off-line processing conditions under which most successful structure from motion work has been achieved. This task of estimating camera motion from measurements of a continuously expanding set of self-mapped visual features is one of a class of problems known as Simultaneous Localisation and Mapping (SLAM) in the robotics community, and we argue that such real-time mapping research, despite rarely being camera-based, is more relevant here than off-line structure from motion methods due to the more fundamental emphasis placed on propagation of uncertainty. We present a top-down Bayesian framework for single-camera localisation via mapping of a sparse set of natural features using motion modelling and an information-guided active measurement strategy, in particular addressing the difficult issue of real-time feature initialisation via a factored sampling approach. Real-time handling of uncertainty permits robust localisation via the creation and active measurement of a sparse map of landmarks such that regions can be re-visited after periods of neglect and localisation can continue through periods when few features are visible. Results are presented of real-time localisation for a hand-waved camera with very sparse prior scene knowledge and all processing carried out on a desktop PC.},
author = {Davison, Andrew J.},
doi = {10.1109/iccv.2003.1238654},
booktitle = {Proceedings of the IEEE International Conference on Computer Vision}
}
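The abstract also singles out real-time feature initialisation via a factored sampling approach. A minimal sketch of the underlying idea, under assumed parameters, is to represent the unknown depth of a newly observed feature along its viewing ray by a set of weighted depth hypotheses, re-weighted by how well each hypothesis predicts later image measurements, until the distribution is compact enough to summarise as a Gaussian depth estimate. The depth range, particle count, pixel noise and toy parallax model below are illustrative assumptions, not the paper's parameters.

import numpy as np

class DepthParticleFilter:
    def __init__(self, d_min=0.5, d_max=5.0, n=100):
        # Depth hypotheses along the feature's viewing ray, initially uniform.
        self.depths = np.linspace(d_min, d_max, n)
        self.weights = np.full(n, 1.0 / n)

    def update(self, predicted_pixels, measured_pixel, sigma_px=2.0):
        """Re-weight each depth hypothesis by the likelihood of the measured
        image position given the pixel each hypothesis predicts."""
        err = predicted_pixels - measured_pixel
        likelihood = np.exp(-0.5 * (err / sigma_px) ** 2)
        self.weights *= likelihood
        self.weights /= self.weights.sum()

    def estimate(self):
        """Gaussian summary (mean, variance) of the remaining depth belief."""
        mean = np.sum(self.weights * self.depths)
        var = np.sum(self.weights * (self.depths - mean) ** 2)
        return mean, var

if __name__ == "__main__":
    pf = DepthParticleFilter()
    # Toy parallax model: after camera translation t, a point at depth d along
    # the ray appears at pixel u = 100 + 50 * t / d.
    true_depth = 2.0
    for t in np.linspace(0.01, 0.2, 10):
        predicted = 100 + 50 * t / pf.depths
        measured = 100 + 50 * t / true_depth
        pf.update(predicted, measured)
    print("depth estimate (mean, var):", pf.estimate())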
{"_id":"rjeehwZw7jf3gAtT7","bibbaseid":"davison-realtimesimultaneouslocalisationandmappingwithasinglecamera-2003","downloads":0,"creationDate":"2018-01-22T16:01:14.031Z","title":"Real-time simultaneous localisation and mapping with a single camera","author_short":["Davison, A., J."],"year":2003,"bibtype":"article","biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibdata":{"title":"Real-time simultaneous localisation and mapping with a single camera","type":"article","year":"2003","pages":"1403-1410","volume":"2","id":"05b75020-2ae9-3849-8bd0-7fd19dfd126c","created":"2022-09-13T08:14:28.121Z","file_attached":"true","profile_id":"ad172e55-c0e8-3aa4-8465-09fac4d5f5c8","group_id":"1ff583c0-be37-34fa-9c04-73c69437d354","last_modified":"2022-09-13T08:14:36.192Z","read":false,"starred":false,"authored":false,"confirmed":"true","hidden":false,"private_publication":false,"abstract":"Ego-motion estimation for an agile single camera moving through general, unknown scenes becomes a much more challenging problem when real-time performance is required rather than under the off-line processing conditions under which most successful structure from motion work has been achieved. This task of estimating camera motion from measurements of a continuously expanding set of self-mapped visual features is one of a class of problems known as Simultaneous Localisation and Mapping (SLAM) in the robotics community, and we argue that such real-time mapping research, despite rarely being camera-based, is more relevant here than off-line structure from motion methods due to the more fundamental emphasis placed on propagation of uncertainty. We present a top-down Bayesian framework for single-camera localisation via mapping of a sparse set of natural features using motion modelling and an information-guided active measurement strategy, in particular addressing the difficult issue of real-time feature initialisation via a factored sampling approach. Real-time handling of uncertainty permits robust localisation via the creating and active measurement of a sparse map of landmarks such that regions can be re-visited after periods of neglect and localisation can continue through periods when few features are visible. Results are presented of real-time localisation for a hand-waved camera with very sparse prior scene knowledge and all processing carried out on a desktop PC.","bibtype":"article","author":"Davison, Andrew J.","doi":"10.1109/iccv.2003.1238654","journal":"Proceedings of the IEEE International Conference on Computer Vision","bibtex":"@article{\n title = {Real-time simultaneous localisation and mapping with a single camera},\n type = {article},\n year = {2003},\n pages = {1403-1410},\n volume = {2},\n id = {05b75020-2ae9-3849-8bd0-7fd19dfd126c},\n created = {2022-09-13T08:14:28.121Z},\n file_attached = {true},\n profile_id = {ad172e55-c0e8-3aa4-8465-09fac4d5f5c8},\n group_id = {1ff583c0-be37-34fa-9c04-73c69437d354},\n last_modified = {2022-09-13T08:14:36.192Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {true},\n hidden = {false},\n private_publication = {false},\n abstract = {Ego-motion estimation for an agile single camera moving through general, unknown scenes becomes a much more challenging problem when real-time performance is required rather than under the off-line processing conditions under which most successful structure from motion work has been achieved. 
This task of estimating camera motion from measurements of a continuously expanding set of self-mapped visual features is one of a class of problems known as Simultaneous Localisation and Mapping (SLAM) in the robotics community, and we argue that such real-time mapping research, despite rarely being camera-based, is more relevant here than off-line structure from motion methods due to the more fundamental emphasis placed on propagation of uncertainty. We present a top-down Bayesian framework for single-camera localisation via mapping of a sparse set of natural features using motion modelling and an information-guided active measurement strategy, in particular addressing the difficult issue of real-time feature initialisation via a factored sampling approach. Real-time handling of uncertainty permits robust localisation via the creating and active measurement of a sparse map of landmarks such that regions can be re-visited after periods of neglect and localisation can continue through periods when few features are visible. Results are presented of real-time localisation for a hand-waved camera with very sparse prior scene knowledge and all processing carried out on a desktop PC.},\n bibtype = {article},\n author = {Davison, Andrew J.},\n doi = {10.1109/iccv.2003.1238654},\n journal = {Proceedings of the IEEE International Conference on Computer Vision}\n}","author_short":["Davison, A., J."],"urls":{"Paper":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c/file/0ad11a1b-8536-ac73-72d4-2eaa3ac61e32/Real_time_simultaneous_localisation_and_mapping_with_a_single_camera.pdf.pdf"},"biburl":"https://bibbase.org/service/mendeley/bfbbf840-4c42-3914-a463-19024f50b30c","bibbaseid":"davison-realtimesimultaneouslocalisationandmappingwithasinglecamera-2003","role":"author","metadata":{"authorlinks":{}},"downloads":0},"search_terms":["real","time","simultaneous","localisation","mapping","single","camera","davison"],"keywords":[],"authorIDs":[],"dataSources":["9cexBw6hrwgyZphZZ","ya2CyA73rpZseyrZ8","2252seNhipfTmjEBQ"]}