@comment{Conference abstract presented at the ASTRO 2016 annual meeting
  (repository export from repo.qst.go.jp). Cleaned up during review:
  de-duplicated the author list (each name appeared twice, once without
  the comma form), replaced month = {Sep} with the standard macro, and
  moved the abstract text out of note into an abstract field so styles
  do not print it in the bibliography. The organization remains in note.
  Key kept unchanged to avoid breaking existing citations.}
@misc{oai:repo.qst.go.jp:00072239,
  author   = {坂田, 幸辰 and 平井, 隆介 and 田口, 安則 and 森, 慎一郎},
  title    = {Marker-less Tumor Tracking for Lung Cancer by Tumor Image Pattern Learning},
  month    = sep,
  year     = {2016},
  note     = {American Society for Radiation Oncology},
  abstract = {Purpose/Objective(s): In particle beam therapy, accurate localization of the target tumor in presence of respiratory motion is necessary for treatment accuracy. A tumor tracking method without fiducial markers using x-ray fluoroscopic images based on multiple learned templates has been previously reported. However, creation of the multiple templates involves inputting the ground truth tumor position for each of the training images manually. In this work, we present a novel method to track lung tumors from fluoroscopic images automatically without fiducial markers. Materials/Methods: The approach chosen for tracking the tumor position is extremely randomized trees (ERT), which is a machine learning–based advanced classifier developed recently. The approach consists of a learning and a tracking stage. In the learning stage, we constructed classifiers from training datasets to estimate the tumor likelihood in the input image pattern. To create the training dataset, we used treatment planning 4-dimensional computed tomography (4D-CT), where the tumor positions in each phase were set by a certificated oncologist. The tumor position together with the image pattern from digitally reconstructed radiograph (DRR) obtained from the 4D-CT were used as training datasets. In the tracking stage, we sampled features from the input fluoroscopic images and classified the samples with our trained ERT to obtain a tumor likelihood map, which shows a relationship between position and tumor likelihood on the image. Tumor position was estimated by fusing the tumor likelihood map with priors obtained from the tumor motion. The tumor position in 3D space can be then be calculated directly using paired x-ray fluoroscopic images. Our method has been quantitatively evaluated on a set of fluoroscopic images of a moving lung phantom with an embedded tumor, which was moved by a mechanical slider. The tumor was tracked with our proposed method and the positional error between calculated and actual positions was evaluated. In addition, tracking results for data from four lung patients were evaluated by a certified oncologist. Results: For the phantom study, the positional accuracy was 0.73 ± 0.27 mm (mean ± standard deviation) in terms of Euclidean 3D distance. The calculation time was less than 0.03 s per frame. When evaluated in the patient study, our method was deemed successful in all cases. Conclusion: We proposed an accurate marker-less tumor tracking method for lung tumors based on machine learning. From the high accuracy and a short computation time obtained in our experiments, we believe our method can significantly improve the accuracy of gate treatment.},
}