@inproceedings{be0ad4f11421402ea7119c0fbbd5a68c,
title = "DualCross: Cross-Modality Cross-Domain Adaptation for Monocular BEV Perception",
abstract = "Closing the domain gap between training and deployment and incorporating multiple sensor modalities are two challenging yet critical topics for self-driving. Existing work focuses on only one of these topics, overlooking the simultaneous domain and modality shift that pervades real-world scenarios. For example, a model trained with multi-sensor data collected in Europe may need to run in Asia with only a subset of input sensors available. In this work, we propose DualCross, a cross-modality cross-domain adaptation framework that facilitates the learning of a more robust monocular bird's-eye-view (BEV) perception model by transferring point cloud knowledge from a LiDAR sensor in one domain during training to a camera-only testing scenario in a different domain. This work constitutes the first open analysis of cross-domain cross-sensor perception and adaptation for monocular 3D tasks in the wild. We benchmark our approach on large-scale datasets under a wide range of domain shifts and show state-of-the-art results against various baselines. Our project webpage is at https://yunzeman.github.io/DualCross.",
author = "Yunze Man and Liangyan Gui and Yu-Xiong Wang",
note = "Conference date: 01-10-2023 through 05-10-2023",
year = "2023",
doi = "10.1109/IROS55552.2023.10341473",
language = "English (US)",
series = "IEEE International Conference on Intelligent Robots and Systems",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "10910--10917",
booktitle = "2023 IEEE/RSJ International Conference on Intelligent Robots and Systems, IROS 2023",
address = "United States",
}