@inproceedings{be0ad4f11421402ea7119c0fbbd5a68c,
title = "DualCross: Cross-Modality Cross-Domain Adaptation for Monocular BEV Perception",
abstract = "Closing the domain gap between training and deployment and incorporating multiple sensor modalities are two challenging yet critical topics for self-driving. Existing work only focuses on single one of the above topics, overlooking the simultaneous domain and modality shift which pervasively exists in real-world scenarios. A model trained with multi-sensor data collected in Europe may need to run in Asia with a subset of input sensors available. In this work, we propose DualCross, a cross-modality cross-domain adaptation framework to facilitate the learning of a more robust monocular bird's-eye-view (BEV) perception model, which transfers the point cloud knowledge from a LiDAR sensor in one domain during the training phase to the camera-only testing scenario in a different domain. This work results in the first open analysis of cross-domain cross-sensor perception and adaptation for monocular 3D tasks in the wild. We benchmark our approach on large-scale datasets under a wide range of domain shifts and show state-of-the-art results against various baselines. Our project webpage is at https://yunzeman.github.io/DualCross.",
author = "Yunze Man and Liangyan Gui and Wang, {Yu Xiong}",
note = "V. CONCLUSION In this paper, we propose DualCross to estimate 3D scene representation in BEV under domain shift and modality change. To achieve this, we construct a LiDAR-Teacher and distill knowledge from it into a Camera-Student by feature supervision. We further propose to align feature space between the domains using multi-stage adversarial learning. Results on large-scale datasets with diverse domain gaps demonstrate the effectiveness of our approach, which marks a significant step towards robust 3D perception in the wild. Acknowledgement. This work was supported in part by NSF Grant 2106825, NIFA Award 2020-67021-32799, the Jump ARCHES endowment, the NCSA Fellows program, the IBM-Illinois Discovery Accelerator Institute, the Illinois-Insper Partnership, and the Amazon Research Award.; 2023 IEEE/RSJ International Conference on Intelligent Robots and Systems, IROS 2023 ; Conference date: 01-10-2023 Through 05-10-2023",
year = "2023",
doi = "10.1109/IROS55552.2023.10341473",
language = "English (US)",
series = "IEEE International Conference on Intelligent Robots and Systems",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "10910--10917",
booktitle = "2023 IEEE/RSJ International Conference on Intelligent Robots and Systems, IROS 2023",
address = "United States",
}