@comment{Cleaned publisher export: braced title acronyms, normalized author
  names to "Last, First", replaced non-ASCII language value. Citation key
  left unchanged so existing \cite commands keep working.}
@inproceedings{2b16272c2d9641cbb89353c2f6ebf543,
  title     = {A Multi-Model Fusion Framework for {NIR}-to-{RGB} Translation},
  abstract  = {Near-infrared (NIR) images provide spectral information beyond the visible light spectrum and thus are useful in many applications. However, single-channel NIR images contain less information per pixel than RGB images and lack visibility for human perception. Transforming NIR images to RGB images is necessary for performing further analysis and computer vision tasks. In this work, we propose a novel NIR-to-RGB translation method. It contains two sub-networks and a fusion operator. Specifically, a U-net based neural network is used to learn the texture information while a CycleGAN based neural network is adopted to excavate the color information. Finally, a guided filter based fusion strategy is applied to fuse the outputs of these two neural networks. Experiment results show that our proposed method achieves superior NIR-to-RGB translation performance.},
  keywords  = {CycleGAN, guided filter, image fusion, NIR-to-RGB translation, U-net},
  author    = {Yan, Longbin and Wang, Xiuheng and Zhao, Min and Liu, Shumin and Chen, Jie},
  note      = {Publisher Copyright: {\textcopyright} 2020 IEEE.; 2020 IEEE International Conference on Visual Communications and Image Processing, VCIP 2020 ; Conference date: 01-12-2020 Through 04-12-2020},
  year      = {2020},
  month     = dec,
  day       = {1},
  doi       = {10.1109/VCIP49819.2020.9301787},
  language  = {english},
  series    = {2020 IEEE International Conference on Visual Communications and Image Processing, VCIP 2020},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {459--462},
  booktitle = {2020 IEEE International Conference on Visual Communications and Image Processing, VCIP 2020},
}