[{"key":"dc.contributor.author","value":"Katebi, Leila","language":null},{"key":"dc.date.accessioned","value":"2026-04-16T17:25:17Z","language":null},{"key":"dc.date.available","value":"2026-04-16T17:25:18Z","language":null},{"key":"dc.date.issued","value":"2026","language":"en"},{"key":"dc.identifier.uri","value":"http:\/\/hdl.handle.net\/2429\/94127","language":null},{"key":"dc.description.abstract","value":"Rapid and reliable assessment of bridge integrity after earthquakes is a critical element of emergency response. Traditional inspections depend on manual visual evaluations, which can delay decisions about bridge closure or repair. This research addresses this limitation by developing a vision-based framework for structural health monitoring of bridges. The proposed approach integrates drone-acquired imagery with advanced computer vision algorithms\u2014specifically, the YOLOv8 object detector and the Segment Anything Model (SAM)\u2014to identify structural components and measure critical displacements, such as support-seat gaps and girder misalignments, in the aftermath of earthquakes. A novel marker-free tracking method (T-SAM) is introduced for dynamic monitoring, allowing displacement tracking from video without the installation of physical markers. A complementary static image analysis workflow (termed YSAMM) combines YOLOv8, SAM, and geometric calibration to quantify residual displacements and rotations between bridge elements from single post-event images.\r\nThe methodology was validated through shake-table experiments and case studies. In dynamic tests on a full-scale wood-frame structure, the marker-free T-SAM tracking achieved millimeter-level accuracy, matching the performance of conventional sensor and marker-based measurements even under varying lighting conditions. In static image analyses, the system measured girder seating lengths and rotations with centimeter-level precision, enabling direct comparison with code-based safety thresholds.\r\nKey results indicate that the vision-based approach can reliably detect subtle damage indicators. For example, T-SAM maintained correlation coefficients of around 0.97\u20130.99 with physical sensor data, and the combined YOLO\u2013SAM technique could accurately determine support seat loss within a few centimeters of the ground truth.\r\nOverall, this thesis demonstrates that an inspector-guided, yet automated, vision system can significantly enhance post-earthquake bridge inspections. By rapidly providing quantitative measurements of damage, the developed framework and tool enable more objective, data-driven decisions on bridge safety and reopening, enhancing resilience in disaster response while reducing reliance on time-consuming manual methods.","language":"en"},{"key":"dc.language.iso","value":"eng","language":"en"},{"key":"dc.publisher","value":"University of British Columbia","language":"en"},{"key":"dc.rights","value":"Attribution-NonCommercial-NoDerivatives 4.0 International","language":"*"},{"key":"dc.rights.uri","value":"http:\/\/creativecommons.org\/licenses\/by-nc-nd\/4.0\/","language":"*"},{"key":"dc.title","value":"Vision-based structural health monitoring of bridges with emphasis on post-disaster damage assessment","language":"en"},{"key":"dc.type","value":"Text","language":"en"},{"key":"dc.degree.name","value":"Doctor of Philosophy - PhD","language":"en"},{"key":"dc.degree.discipline","value":"Civil Engineering","language":"en"},{"key":"dc.degree.grantor","value":"University of British Columbia","language":"en"},{"key":"dc.contributor.supervisor","value":"Ventura, Carlos","language":null},{"key":"dc.date.graduation","value":"2026-05","language":"en"},{"key":"dc.type.text","value":"Thesis\/Dissertation","language":"en"},{"key":"dc.description.affiliation","value":"Applied Science, Faculty of","language":"en"},{"key":"dc.description.affiliation","value":"Civil Engineering, Department of","language":"en"},{"key":"dc.degree.campus","value":"UBCV","language":"en"},{"key":"dc.description.scholarlevel","value":"Graduate","language":"en"}]