@incollection{eprints3651,
  author    = {John A. Onofrey and Ilkay Oksuz and Saradwata Sarkar and Rajesh Venkataraman and Lawrence H. Staib and Xenophon Papademetris},
  title     = {MRI-TRUS Image Synthesis with Application to Image-Guided Prostate Intervention},
  booktitle = {Simulation and Synthesis in Medical Imaging. First International Workshop, SASHIMI 2016, Held in Conjunction with MICCAI 2016, Athens, Greece, October 21, 2016, Proceedings},
  series    = {Lecture Notes in Computer Science},
  number    = {9968},
  pages     = {157--166},
  publisher = {Springer International Publishing},
  year      = {2016},
  url       = {http://eprints.imtlucca.it/3651/},
  abstract  = {Accurate and robust fusion of pre-procedure magnetic resonance imaging (MRI) to intra-procedure trans-rectal ultrasound (TRUS) imaging is necessary for image-guided prostate cancer biopsy procedures. The current clinical standard for image fusion relies on non-rigid surface-based registration between semi-automatically segmented prostate surfaces in both the MRI and TRUS. This surface-based registration method does not take advantage of internal anatomical prostate structures, which have the potential to provide useful information for image registration. However, non-rigid, multi-modal intensity-based MRI-TRUS registration is challenging due to the highly non-linear intensity relationships between MRI and TRUS. In this paper, we present preliminary work that uses image synthesis to cast this problem as a mono-modal registration task, using a large database of over 100 clinical MRI-TRUS image pairs to learn a joint model of MR-TRUS appearance. Thus, given an MRI, we use this learned joint appearance model to synthesize the patient's corresponding TRUS image appearance, with which we could potentially perform mono-modal intensity-based registration. We present preliminary results of this approach.}
}