@inproceedings{bfc528c765ad4112a8f086a0de37f164,
title = "DIFFEVENT: EVENT RESIDUAL DIFFUSION FOR IMAGE DEBLURRING",
abstract = "Traditional frame-based cameras inevitably suffer from non-uniform blur in real-world scenarios. Event cameras that record the intensity changes with high temporal resolution provide an effective solution for image deblurring. In this paper, we formulate the event-based image deblurring as an image generation problem by designing diffusion priors for the image and residual. Specifically, we propose an alternative diffusion sampling framework to jointly estimate clear and residual images to ensure the quality of the final result. In addition, to further enhance the subtle details, a pseudoinverse guidance module is leveraged to guide the prediction closer to the input with event data. Note that the proposed method can effectively handle the real unknown degradation without kernel estimation. The experiments on the benchmark event datasets demonstrate the effectiveness of our method.",
keywords = "Image deblurring, diffusion guidance generation, event camera",
author = "Pei Wang and Jiumei He and Qingsen Yan and Yu Zhu and Jinqiu Sun and Yanning Zhang",
note = "Publisher Copyright: {\textcopyright} 2024 IEEE.; 2024 IEEE International Conference on Acoustics, Speech, and Signal Processing, ICASSP 2024 ; Conference date: 14-04-2024 Through 19-04-2024",
year = "2024",
doi = "10.1109/ICASSP48485.2024.10446822",
language = "英语",
series = "ICASSP, IEEE International Conference on Acoustics, Speech and Signal Processing - Proceedings",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "3450--3454",
booktitle = "2024 IEEE International Conference on Acoustics, Speech, and Signal Processing, ICASSP 2024 - Proceedings",
}