<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="research-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR XR Spatial Comput</journal-id><journal-id journal-id-type="publisher-id">xr</journal-id><journal-id journal-id-type="index">46</journal-id><journal-title>JMIR XR and Spatial Computing (JMXR)</journal-title><abbrev-journal-title>JMIR XR Spatial Comput</abbrev-journal-title><issn pub-type="epub">2818-3045</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v2i1e74429</article-id><article-id pub-id-type="doi">10.2196/74429</article-id><article-categories><subj-group subj-group-type="heading"><subject>Original Paper</subject></subj-group></article-categories><title-group><article-title>Virtual Reality Application for Teaching Complex Congenital Heart Defect Anatomy: Design and Development Study</article-title></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><name name-style="western"><surname>Muneton</surname><given-names>Kevin</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Buyck</surname><given-names>David</given-names></name><degrees>DS</degrees><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Guerrero-Chalela</surname><given-names>Carlos-Eduardo</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name 
name-style="western"><surname>Narasimhan</surname><given-names>Shanti</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff4">4</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Iaizzo</surname><given-names>Paul A</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib></contrib-group><aff id="aff1"><institution>The Visible Heart Laboratories, University of Minnesota</institution><addr-line>B172 Mayo, MMC195, 420 Delaware St SE</addr-line><addr-line>Minneapolis</addr-line><addr-line>MN</addr-line><country>United States</country></aff><aff id="aff2"><institution>Department of Bioinformatics and Computational Biology, University of Minnesota</institution><addr-line>Minneapolis</addr-line><addr-line>MN</addr-line><country>United States</country></aff><aff id="aff3"><institution>3D Printing and Modelling Centre La Cardio-Andes, Institute of Cardiology, Fundacion Cardioinfantil Instituto de Cardiolog&#x00ED;a</institution><addr-line>Bogot&#x00E1;</addr-line><country>Colombia</country></aff><aff id="aff4"><institution>Division of Pediatric Cardiology, University of Minnesota</institution><addr-line>Minneapolis</addr-line><addr-line>MN</addr-line><country>United States</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Leung</surname><given-names>Tiffany</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Werner</surname><given-names>Heron</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Odame-Amoabeng</surname><given-names>Sylvester</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Kevin Muneton, MD, The Visible Heart Laboratories, University of Minnesota, B172 Mayo, MMC195, 420 Delaware St SE, Minneapolis, MN, 55455, United States, 1 5618713359; 
<email>kmuneton@umn.edu</email></corresp></author-notes><pub-date pub-type="collection"><year>2025</year></pub-date><pub-date pub-type="epub"><day>22</day><month>12</month><year>2025</year></pub-date><volume>2</volume><elocation-id>e74429</elocation-id><history><date date-type="received"><day>24</day><month>03</month><year>2025</year></date><date date-type="rev-recd"><day>09</day><month>12</month><year>2025</year></date><date date-type="accepted"><day>10</day><month>12</month><year>2025</year></date></history><copyright-statement>&#x00A9; Kevin Muneton, David Buyck, Carlos-Eduardo Guerrero-Chalela, Shanti Narasimhan, Paul A Iaizzo. Originally published in JMIR XR and Spatial Computing (<ext-link ext-link-type="uri" xlink:href="https://xr.jmir.org">https://xr.jmir.org</ext-link>), 22.12.2025. </copyright-statement><copyright-year>2025</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR XR and Spatial Computing, is properly cited. The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://xr.jmir.org/">https://xr.jmir.org/</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://xr.jmir.org/2025/1/e74429"/><abstract><sec><title>Background</title><p>Medical education still faces challenges in teaching complex human cardiac anatomy to a wide range of learners, especially in the subject of congenital heart defects. 
Traditional educational methods such as cadaver dissection and the use of textbooks still face some limitations, for example, specimen availability and student comfort. The objective of this research is to use newer technologies, like virtual reality (VR), to teach the anatomy of congenital heart defects (CHDs) using 3D heart models derived from real computed tomography (CT) scan images, to offer a more profound learning experience for learners and mentors.</p></sec><sec><title>Objective</title><p>The objective of this study is to propose and implement an innovative pipeline for a VR-based educational application for CHDs, and to assess its acceptance and suitability for medical education by evaluating potential future users.</p></sec><sec sec-type="methods"><title>Methods</title><p>Heart images of anonymized CT scans were used to create high-fidelity 3D cardiac models, using Materialise Mimics Core and 3-matic Medical. These models were next integrated into a Unity-powered VR application. A heterogeneous group of cardiologists, biomedical engineers, and medical trainees visiting the Visible Heart Laboratories was invited to this pilot study to assess perceived effectiveness and satisfaction of this VR app for CHD education.</p></sec><sec sec-type="results"><title>Results</title><p>A total of 9 participants were included in the study, comprising 4 (44%) cardiologists, 2 (22%) biomedical engineers, and 2 (22%) medical trainees. Mean perception of the potential of this VR app for CHD education (4.56, SD 0.53) was higher than that of traditional methods (3.22, SD 0.97; <italic>P</italic>=.008). Participants rated this application more suitable for medical students (mean=3.22, SD 0.83; <italic>P</italic>=.02) and patient education (mean=4.11, SD 1.05; <italic>P</italic>=.03).</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>Our developed VR application offers an innovative approach to teaching the complex cardiac anatomy of CHDs using models from CT scans. 
This research shows how newer technologies can be optimized for medical education with the advantage of 3D visualization and a teaching environment without geographical limitations. Future work should focus on Extended Reality (XR) integration, adding more heart and organ models, as well as performing long-term efficacy studies.</p></sec></abstract><kwd-group><kwd>medical education</kwd><kwd>virtual reality</kwd><kwd>congenital heart disease</kwd><kwd>congenital heart defect</kwd><kwd>anatomy</kwd><kwd>innovation</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>Current curricula in medical education often face the challenges of keeping pace with newly developed teaching methods: for example, these include those introduced by technological advancements such as the use of web applications, virtual reality (VR), and extended reality (XR) [<xref ref-type="bibr" rid="ref1">1</xref>]. The study of complexities and variations within human anatomy has historically been based on the use of fresh cadavers, hearts preserved in formaldehyde, and the study of anatomy textbooks [<xref ref-type="bibr" rid="ref2">2</xref>,<xref ref-type="bibr" rid="ref3">3</xref>]. The use of new technologies in medical education, such as XR and VR, is particularly valuable given the complex spatial architecture of the human heart [<xref ref-type="bibr" rid="ref4">4</xref>,<xref ref-type="bibr" rid="ref5">5</xref>]. This complexity is further amplified in the context of teaching congenital heart defects (CHD), where anatomical variations are highly diverse. 
Students face significant cognitive challenges in understanding these conditions due to the intertwined vascular structures, dynamic relationships between cardiac components, and the wide variability of defects that can occur from one patient to another [<xref ref-type="bibr" rid="ref5">5</xref>,<xref ref-type="bibr" rid="ref6">6</xref>].</p><p>The use of traditional dissection and the study of macroscopic anatomy, as well as generating human models, while of particular importance in acquiring practical skills that are difficult to replace [<xref ref-type="bibr" rid="ref7">7</xref>-<xref ref-type="bibr" rid="ref9">9</xref>], are methods with numerous limitations. Today, many medical school anatomy laboratories have difficulties maintaining adequate quantities of specimens in the needed conditions for educational study [<xref ref-type="bibr" rid="ref10">10</xref>]. The practice of only studying surface anatomy falls short when one needs to understand complex anatomical details, such as vasculature, complexities of nervous innervations, associated congenital heart defects, and other intricate aspects [<xref ref-type="bibr" rid="ref7">7</xref>,<xref ref-type="bibr" rid="ref10">10</xref>]. It should also be noted that some students don&#x2019;t have access to a dissection opportunity, and in others, such environments can be a source of stress and anxiety [<xref ref-type="bibr" rid="ref11">11</xref>,<xref ref-type="bibr" rid="ref12">12</xref>]. In this regard, the integration of XR technologies into medical education represents a significant advancement for many learners in the application of a better understanding of complex human anatomical and clinical knowledge required in healthcare or the medical device field.</p><p>Extended reality technologies are an umbrella term that encompasses VR, augmented reality (AR), and mixed reality (MR) educational tools [<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref14">14</xref>]. 
In general, VR is typically described as a computer-generated three-dimensional space that simulates the presence of physical objects through multisensory interactions, creating immersive experiences in which users perceive artificial worlds as if they were real. It can also be characterized by its capacity to be a computer-generated space where multiple users can have simultaneous access and thus participate in shared activities [<xref ref-type="bibr" rid="ref15">15</xref>-<xref ref-type="bibr" rid="ref17">17</xref>]. While different types of metaverse share common features such as head-mounted displays for virtual immersions, motion tracking with 6 degrees of freedom for real-time interactions, haptic feedback, and spatial audio to create a more immersive experience [<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref18">18</xref>], VR differs from AR in that AR superimposes digital information onto the user&#x2019;s real physical environment, while VR completely isolates the user from the outside world, immersing them in a fully simulated virtual environment with which they can interact [<xref ref-type="bibr" rid="ref15">15</xref>,<xref ref-type="bibr" rid="ref19">19</xref>].</p><p>According to recently reported studies on knowledge construction, perhaps a more effective way to better retain knowledge is through &#x201C;active learning&#x201D; or &#x201C;learning by doing.&#x201D; This implies that individuals better acquire critical knowledge through interactive and self-directed activities, such as those offered by VR [<xref ref-type="bibr" rid="ref20">20</xref>]. The advantages of VR in educational settings include its ability to overcome physical barriers, allowing learners to engage with complex content regardless of location or access to physical resources [<xref ref-type="bibr" rid="ref15">15</xref>]. 
This feature also enables collaboration among multiple users, providing greater flexibility to develop new applications and initiate diverse activities [<xref ref-type="bibr" rid="ref6">6</xref>]. For example, by engaging with these VR applications, students can pursue self-directed learning and address their questions with greater autonomy [<xref ref-type="bibr" rid="ref15">15</xref>]. Within the Visible Heart Laboratory, virtual platforms with interactive 3D models have been successfully used by medical students and residents for training relative to both cardiac physiology and transesophageal echocardiography [<xref ref-type="bibr" rid="ref4">4</xref>,<xref ref-type="bibr" rid="ref21">21</xref>]. By leveraging an extensive collection of over 650 perfusion-fixed human cardiac specimens, our laboratory continues to generate high-resolution 3D computational models that, in turn, facilitate precise virtual placements of medical devices [<xref ref-type="bibr" rid="ref6">6</xref>]. Additionally, multiple applications have been developed and used in the field of presurgical planning, using VR in our facilities, and the unique availability of an extensive range of fixed human hearts. This approach has been the cornerstone for generating multiple educational and collaborative tools such as our Stenting simulator, Seldinger technique guidewire simulator, and augmented reality applications like the &#x201C;Heart to Learn App&#x201D; [<xref ref-type="bibr" rid="ref22">22</xref>,<xref ref-type="bibr" rid="ref23">23</xref>].</p><p>To evaluate the relative efficacies of using VR for human anatomic education, multiple research groups have consistently described the associated improvements this approach offers. 
For instance, Minouei et al [<xref ref-type="bibr" rid="ref24">24</xref>] and Liu et al [<xref ref-type="bibr" rid="ref25">25</xref>], in systematic reviews, found that VR commonly enhanced the students&#x2019; academic progress in terms of theoretical knowledge, practical proficiency, and overall satisfaction when used as a supplementary method to other teaching approaches. Similarly, Baek et al [<xref ref-type="bibr" rid="ref2">2</xref>] described 6 VR applications for the visualization and study of anatomical structures. They recommended that the use of these applications should be tailored to the student&#x2019;s needs and suggested several ideal conditions for the creation of a VR application, such as the need for detailed tutorials, a realistic description of the anatomical model, motion animations, periodic updates, cost effectiveness, and high resolution. Other educators have conducted a controlled clinical study, examining the roles of interactive 3D models in medical anatomy education; in such, 200 medical students were surveyed, and the results showed a statistically significant improvement in post-test knowledge when compared to the control group [<xref ref-type="bibr" rid="ref26">26</xref>]. Further, Garcia-Robles et al [<xref ref-type="bibr" rid="ref27">27</xref>] and Salimi et al [<xref ref-type="bibr" rid="ref28">28</xref>], in their meta-analyses, demonstrated that XR generated increases in knowledge compared to traditional learning methods, especially when used with complementary resources. They noted that these learning approaches were most beneficial for undergraduate students, of whom 80% reported that such VR tools were useful for learning complex anatomies. 
Similarly, Liu et al [<xref ref-type="bibr" rid="ref25">25</xref>] reported a higher satisfaction rating in groups of health care workers and students who used VR compared to other learning methods.</p><p>It is also important to consider the limitations of VR use in some individuals, which can generate adverse effects such as visually induced motion sickness (VIMS). In a pilot study, VIMS was reported in 32% of participants, of which 40% stated that experiencing VIMS could negatively impact their learning process when using VR educational tools [<xref ref-type="bibr" rid="ref29">29</xref>].</p><p>One should also consider that gaps currently persist in the specific implementations of video game engines for applications in educational cardiac modeling, such as the implementation of haptic devices to simulate organ deformation by touching it, gamification strategies (rankings, scores), developing effective ways to reduce side effects (blurriness, dizziness), and performing more studies with a large sample size to assess the best way to deliver immersive education to students; this is particularly true in the field of CHD [<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>].</p><p>This study proposes an innovative pipeline that combines high-fidelity 3D modeling based on real tomographic data with dynamic visualization of myocardial layers and blood volume, and the implementations of such in Unity 3D [<xref ref-type="bibr" rid="ref32">32</xref>], to create a VR-based application that can be used for educational purposes in CHDs. We also aim to measure the acceptance and suitability of this application in medical practice by assessing possible future users of the application.</p></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Image Acquisition and 3D Modeling</title><p>The methodologies employed in our work were developed in several sequential stages, beginning with the acquisition and processing of 
medical images. After informed consent, collection of primary and secondary medical data and information de-identification, anonymous DICOM images from patients were obtained through computed tomography (CT) from cases shared by Hospital La Cardio (Bogot&#x00E1;, Colombia), and from the anonymous database within the Visible Heart<sup>&#x00AE;</sup> Laboratories (University of Minnesota, Minneapolis, USA). Following their initial clinical use, associated images were anonymized using the safe-harbor method to ensure patient confidentiality; the resulting dataset contained only the anonymized DICOM files required for 3D reconstruction. No additional consent was required during the development of this study, as the anonymized data posed no risks to patient privacy.</p><p>These image datasets were processed using Materialise Mimics Core 27.0 software to perform semi-automatic segmentation of cardiac structures. As these investigations and the development of de-identified 3D computational models did not directly involve human participants or use identifiable personal data, they did not require additional approval from an ethics committee.</p><p>In general, the segmentation process included automatic image alignments to ensure anatomical symmetry, followed by blood volume segmentations of ventricles, atria, and great vessels using adaptive thresholding (HU: approximately 200 to 1500) via the software&#x2019;s CT Heart tools. For a given heart, specific cardiac defects were identified, such as interventricular communications, interatrial communications, patent ductus arteriosus, and/or others. Anatomical borders were manually refined using the Edit Masks function, achieving precision up to 1 mm, and internal hollowing was applied to simulate cardiac cavities.</p><p>Subsequently, these segmented congenital heart models were exported to Materialise 3-matic Medical 19.0 for optimization and final 3D modeling. 
At this stage, topological mesh repair was performed, eliminating nonmanifold triangles, and cardiac valves were added using a parametric model library, adjusting their scale to the specific dimensions of each patient&#x2019;s heart. Anatomical textures were applied, and surfaces were smoothed using the Smooth Surface software module. The final models were exported in OBJ format, with mesh parameters optimized for VR performance.</p></sec><sec id="s2-2"><title>VR Application Development</title><p>During the development of the VR-based educational application, Unity software 2021.3 LTS with XR Interaction Toolkit 2.3 was used. The generated 3D heart models were then obtained from 3-matic and were imported to Unity as optimized mesh assets. To ensure complete visualization of the cardiac structures, backface culling was disabled, allowing the users to actively &#x201C;cut&#x201D; into the heart and analyze the internal structures of the given 3D heart model.</p><p>To optimize anatomical accuracy, custom shader-based materials were added with deoxygenated and oxygenated blood volumes color-coded with blue and red, respectively, as well as myocardium rendered in flesh tones, and the various pathological defects were highlighted: remaining visually coherent under a dynamic real-time clipping system, developed using shader programming.</p><p>A floating in-world Canvas menu was designed using Unity&#x2019;s XR Ray Interactor system, allowing users to have seamless navigation between heart models, toggling different cardiopathies, and scrolling texts. 
Beyond single-user exploration, this unique educational application was designed to support real-time multiplayer interactions, using Unity&#x2019;s Netcode for GameObjects: users can communicate in real time using integrated Vivox Voice Chat and Photon PUN networking.</p><p>While the initial release targets personal computer&#x2013;based VR setups, a standalone version was optimized for lower-performance hardware using texture compression and GPU instancing to maintain rendering efficiency. Compatibility with OpenXR ensures broader accessibility across multiple headsets, including the Valve Index, HTC Vive, and Oculus Rift.</p><sec id="s2-2-1"><title>VR App: Innovative Functionalities</title><p>Our developed educational application includes a floating menu that categorizes heart defects into cyanotic and noncyanotic (<xref ref-type="fig" rid="figure1">Figure 1</xref>), with dynamic descriptive text incorporated within relative to each heart condition to be studied and explanatory images of predictive blood flows. Users can move the menu in the VR space as needed. The main functionalities allow users to manipulate cardiac models, including scaling, free rotations, and the ability to perform multiplanar cuts using Shader Graph. 
Additionally, there is an incorporated pointer feature that any of the users can use to mark the given defect to be mentioned or mark a specific location on the heart (<xref ref-type="fig" rid="figure2">Figure 2</xref>).</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>Congenital heart defects floating menu with animated normal heart.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="xr_v2i1e74429_fig01.png"/></fig><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>Pointer and cutting plane tools for this virtual reality application.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="xr_v2i1e74429_fig02.png"/></fig><p>Multiple users can also freely interact with a heart with normal anatomy, which is animated to visualize how different structures of a normal heart, such as valves and myocardial walls, interact during the cardiac cycle of systole and diastole. 
Additionally, users can interact with various pathological models adapted from real tomographies, such as: for noncyanotic heart defects, a ventricular septal defect, a sinus venosus type atrial septal defect, a patent ductus arteriosus, an atrioventricular septal defect, a coarctation of the aorta, and for cyanotic heart defects, with truncus arteriosus, transposition of the great arteries, and tetralogy of Fallot (ToF) (<xref ref-type="fig" rid="figure3">Figure 3</xref>).</p><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>Tetralogy of Fallot heart model and description.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="xr_v2i1e74429_fig03.png"/></fig><p>To facilitate collaborative teaching, a multiplayer architecture was implemented using Netcode for GameObjects, allowing synchronization of 3D models and actions across multiple devices in different global locations (<xref ref-type="fig" rid="figure4">Figure 4</xref>). This enables the use of a virtual classroom where the teacher has the abilities to instruct anatomy classes in real-time, point out defects with simultaneous visualizations by students, and guide anatomical tours, while students can access interactive menus and communicate with each other as well as the teacher for more immersive experiences.</p><fig position="float" id="figure4"><label>Figure 4.</label><caption><p>Collaborative multiplayer environment.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="xr_v2i1e74429_fig04.png"/></fig></sec></sec><sec id="s2-3"><title>Participants and Intervention</title><p>To evaluate the functionalities of our newly developed application, this study utilized a cross-sectional observational design, it was conducted at the Visible Heart Laboratories (University of Minnesota, Minneapolis, USA). 
Data collection took place between September 16 and September 20, 2025.</p><p>The study used a convenience sampling method; a heterogeneous group of experts in cardiology, biomedical engineers, and medical trainees who visit our facilities for training on cardiology procedures and research purposes were invited to participate in our pilot study. Participants were provided with an Information Sheet for Research outlining the study procedures. Given that no Protected Health Information was being collected, the Information Sheet for Research provided before the data collection served as an Informed Consent process. Note, all participation was voluntary, and no incentives were given (<xref ref-type="supplementary-material" rid="app1">Multimedia Appendix 1</xref>)<italic>.</italic></p><p>Users were asked to try the VR tool in person after having received detailed instructions on how to use the educational tool. After that, participants were required to select a given CHD, manipulate a VR heart, and find where the defect is located, slicing the heart with a virtual multiplanar cut, to identify the specific defect within the heart from different points of view. Participants were given the opportunity to freely interact with the VR environment for 15 minutes. Finally, after the VR intervention, participants completed a professional background questionnaire and a 23-item training satisfaction survey. Survey responses were collected via an electronic form; these were filled out with a personal device. Data were collected anonymously using online Google Forms (Google LLC).</p><p>The sample size was determined by the availability of eligible participants visiting the laboratory during the data collection window. To minimize recall bias, the survey was administered immediately after the VR experience. 
Selection bias is acknowledged due to the convenience sampling of individuals already present in a specialized cardiac research center.</p></sec><sec id="s2-4"><title>Collection Instruments</title><p>A QR code was provided to answer the survey in which we gathered information related to <italic>Professional background:</italic> Professional role, years of experience in the field, experience with VR, experience with CHD education; and <italic>VR-application satisfaction survey</italic> that was assessed using a Likert survey adapted from the previously validated System Usability Scale [<xref ref-type="bibr" rid="ref33">33</xref>] and a validated measure of VR user experience [<xref ref-type="bibr" rid="ref34">34</xref>], also some other additional satisfaction questions about educational challenges on CHD education, VR application evaluation and implementation feasibility, also some open-ended questions were given for qualitative feedback. All responses were collected in an unidentifiable manner on a Google Forms spreadsheet (<xref ref-type="supplementary-material" rid="app1">Multimedia Appendices 1</xref><xref ref-type="supplementary-material" rid="app2"/>-<xref ref-type="supplementary-material" rid="app3">3</xref>)<italic>.</italic></p></sec><sec id="s2-5"><title>Statistical Analysis</title><p>Anonymized raw data were exported from Google Forms into Microsoft Excel for data organization. All statistical analysis was performed using Python (version 3.10) with Pandas and SciPy libraries. Descriptive statistics were conducted for all variables; categorical variables were reported as frequencies and percentages (n, %), whereas continuous variables were expressed as mean (SD) values, with 95% CI.</p><p>To determine the primary hypothesis regarding the application utility versus traditional methods effectiveness, a Wilcoxon signed-rank test was used to compare the responses within the same subjects (this test was selected due to sample size, N=9). 
One-sample Wilcoxon signed-rank tests were used to determine statistically significant responses different from neutral (score=3). Statistical significance was defined as a two-tailed <italic>P</italic> value &#x003C;.05.</p></sec><sec id="s2-6"><title>Ethical Considerations</title><p>The protocol of this study follows ethical guidelines of the 1975 Declaration of Helsinki, ensuring compliance with international standards for the ethical use of human data in research and was reviewed and approved by the IRB #CEIC-0074&#x2010;2022 by Hospital LaCardio (Bogota, Colombia), and by the IRB #MOD00055981 (Atlas of Human Cardiac Anatomy: pediatric heart defects) at the University of Minnesota (Minneapolis, MN, USA). The medical images used in this study were originally obtained for clinical and educational purposes, with informed consent secured from patients at the time of imaging for educational and research uses. Following their initial clinical use, associated images were anonymized to ensure patient confidentiality. No additional consent was required during the development of this study, as the anonymized data posed no risks to patient privacy.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Background Information</title><p>A total of 9 participants completed the survey, in which 4 (44%) were cardiologists, 2 (22%) were medical trainees, 1(11%) a biomedical engineer PhD student, 1 (11%) a biomedical engineer faculty member, and 1 (11%) a cardiac researcher. Respondents reported a wide spectrum of VR familiarity, from no prior experience to regular users, with 1 person having more than 20 years of experience, 1 user having between 11&#x2010;20 years of experience, 2 users between 6&#x2010;10 years of experience, 4 users having between 2&#x2010;5 years of experience and 1 person having less than 2 years of experience in their respective fields. 
Of those, 5 users described themselves as being &#x201C;moderately familiar&#x201D; with the use of VR, 1 user self-described as &#x201C;very familiar,&#x201D; and 3 users self-described as being &#x201C;expert level&#x201D; (See <xref ref-type="table" rid="table1">Table 1</xref>).</p><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Participants characteristics.</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Characteristics and Category</td><td align="left" valign="bottom">Participants (N=9), n (%)</td></tr></thead><tbody><tr><td align="left" valign="top">Professional role</td><td align="left" valign="top"/></tr><tr><td align="left" valign="top">&#x2003;Pediatric cardiologist</td><td align="left" valign="top">&#x2003;3 (33.30)</td></tr><tr><td align="left" valign="top">&#x2003;Medical student/Resident</td><td align="left" valign="top">&#x2003;2 (22.20)</td></tr><tr><td align="left" valign="top">&#x2003;Adult CHD<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup> cardiologist</td><td align="left" valign="top">&#x2003;1 (11.10)</td></tr><tr><td align="left" valign="top">&#x2003;Cardiac researcher</td><td align="left" valign="top">&#x2003;1 (11.10)</td></tr><tr><td align="left" valign="top">&#x2003;Bioengineering faculty</td><td align="left" valign="top">&#x2003;1 (11.10)</td></tr><tr><td align="left" valign="top">&#x2003;BME PhD student</td><td align="left" valign="top">&#x2003;1 (11.10)</td></tr><tr><td align="left" valign="top">Years of experience</td><td align="left" valign="top"/></tr><tr><td align="left" valign="top">&#x2003;2&#x2010;5 years</td><td align="left" valign="top">&#x2003;4 (44.40)</td></tr><tr><td align="left" valign="top">&#x2003;6&#x2010;10 years</td><td align="left" valign="top">&#x2003;2 (22.20)</td></tr><tr><td align="left" valign="top">&#x2003;&#x003C;2 years</td><td align="left" valign="top">&#x2003;1 (11.10)</td></tr><tr><td align="left" 
valign="top">&#x2003;11&#x2010;20 years</td><td align="left" valign="top">&#x2003;1 (11.10)</td></tr><tr><td align="left" valign="top">&#x2003;&#x003E;20 years</td><td align="left" valign="top">&#x2003;1 (11.10)</td></tr><tr><td align="left" valign="top">Prior VR<sup><xref ref-type="table-fn" rid="table1fn2">b</xref></sup> experience</td><td align="left" valign="top"/></tr><tr><td align="left" valign="top">&#x2003;Moderate (used several apps)</td><td align="left" valign="top">&#x2003;4 (44.40)</td></tr><tr><td align="left" valign="top">&#x2003;Extensive/Expert</td><td align="left" valign="top">&#x2003;2 (22.20)</td></tr><tr><td align="left" valign="top">&#x2003;Limited/None</td><td align="left" valign="top">&#x2003;3 (33.30)</td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>CHD: congenital heart defect.</p></fn><fn id="table1fn2"><p><sup>b</sup>VR: virtual reality.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-2"><title>Perceived Effectiveness: VR vs Traditional Methods</title><p>Participants rated their perception of potential educational value for the VR app higher with a mean of 4.56 (SD 0.53), than traditional methods&#x2019; effectiveness with a mean of 3.22 (SD 0.97), with a significant statistical difference (<italic>P</italic>=.008), using a Wilcoxon signed rank-test (See <xref ref-type="fig" rid="figure5">Figure 5</xref>).</p><fig position="float" id="figure5"><label>Figure 5.</label><caption><p>Perceived effectiveness of VR-based education versus traditional methods. 
VR: virtual reality.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="xr_v2i1e74429_fig05.png"/></fig></sec><sec id="s3-3"><title>Perceived Suitability in Different Educational Contexts</title><p>Participants evaluated the application&#x2019;s suitability for various target audiences given its educational content; they rated the application suitable for medical students in preclinical and clinical years (mean 4.22, <italic>P</italic>=.012) and for patient education (mean 4.11, <italic>P</italic>=.03). Participants were likely to recommend the application (mean 4.44, SD=0.73, <italic>P</italic>=.009). Ratings for anatomical accuracy (mean 3.88) and suitability for practicing physicians (mean 4.11) were positive but not statistically significant (see <xref ref-type="table" rid="table2">Table 2</xref> and <xref ref-type="fig" rid="figure6">Figure 6</xref>).</p><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Perceived effectiveness and suitability of VR-based<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup> education for congenital heart defect.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Survey item</td><td align="left" valign="bottom">Mean (SD)</td><td align="left" valign="bottom"><italic>P</italic> value</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="3">Primary comparison</td></tr><tr><td align="left" valign="top">Traditional methods effectiveness</td><td align="left" valign="top">&#x2003;3.22 (0.97)</td><td align="left" valign="top">&#x2003;Ref</td></tr><tr><td align="left" valign="top">VR application educational value</td><td align="left" valign="top">&#x2003;4.56 (0.53)</td><td align="left" valign="top">&#x2003;.008</td></tr><tr><td align="left" valign="top" colspan="3">Suitability</td></tr><tr><td align="left" valign="top">Likelihood to recommend</td><td align="left" 
valign="top">&#x2003;4.44 (0.73)</td><td align="left" valign="top">&#x2003;.009</td></tr><tr><td align="left" valign="top">Suitability: Preclinical students</td><td align="left" valign="top">&#x2003;4.22 (0.83)</td><td align="left" valign="top">&#x2003;.012</td></tr><tr><td align="left" valign="top">Suitability: Clinical students</td><td align="left" valign="top">&#x2003;4.22 (0.83)</td><td align="left" valign="top">&#x2003;.012</td></tr><tr><td align="left" valign="top">Suitability: Patient education</td><td align="left" valign="top">&#x2003;4.11 (1.05)</td><td align="left" valign="top">&#x2003;.03</td></tr><tr><td align="left" valign="top">Suitability: Practicing physicians</td><td align="left" valign="top">&#x2003;4.11 (1.27)</td><td align="left" valign="top">&#x2003;.07</td></tr><tr><td align="left" valign="top">Anatomical accuracy</td><td align="left" valign="top">&#x2003;3.88 (1.13)</td><td align="left" valign="top">&#x2003;.07</td></tr></tbody></table><table-wrap-foot><fn id="table2fn1"><p><sup>a</sup>VR: virtual reality.</p></fn></table-wrap-foot></table-wrap><fig position="float" id="figure6"><label>Figure 6.</label><caption><p>Perceived suitability for educational contexts.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="xr_v2i1e74429_fig06.png"/></fig></sec><sec id="s3-4"><title>Usability and Feasibility</title><p>It should be noted that the main significant challenges of CHD education, recognized by the users, were related to the complexities of the 3D spatial relationships recognized by 9 out of 9 (100%) of users, as well as the lack of interactive learning tools 6 (66%), and the limited availability of specimens and cases 3 (33%). The ongoing educational methods most commonly used by the users for CHD education were 2D illustrations/textbooks 9 (100%), followed by PowerPoint presentations 8 (88%) and clinical cases or rotations 4 (44%). 
Most users agreed that one of the most innovative features of our developed VR application was having real high-resolution CT scan-based 3D models 9 (100%) and a real-time multiplanar cutting tool 7 (77%). Regarding the relative accuracies of the models, 3 (33%) of the respondents found them to be extremely accurate, 2 (22%) thought that they were very accurate, 2 (22%) noted that they were moderately accurate, and 1 (11%) considered them slightly accurate.</p><p>Regarding the feasibility of implementing these CHD learning tools, the most noted concerns for this VR application were technical complexity for users 6 (66%), 2 (22%) of individuals thought that the models needed to be more accurate, and most felt that the costs of VR hardware 6 (66%) were a barrier. Also, 6 (66%) of users noted that one of the most critical barriers for ultimately implementing this VR tool at their institution would be related to budget constraints. When asked what additional features would enhance the educational value of this VR application, 6 (66%) of the users recommended adding post-surgical procedural simulations, while 5 (55%) recommended built-in assessment/quiz tools. Others noted that haptic feedback capabilities and additional cardiac pathologies would also be of value.</p></sec><sec id="s3-5"><title>Qualitative Feedback</title><p>For the obtained feedback on the open-ended questions, a large majority of users praised the CHD VR application&#x2019;s ability to visualize complex 3D anatomy and its relationships with associated defects. Most respondents commonly mentioned that the main benefit was improved anatomical understanding. 
Users noted:</p><disp-quote><p>It offers a lot better 3D understanding of the anatomy.</p></disp-quote><p>and</p><disp-quote><p>It can help better visualize 3D interactions and gather people without geographical constraints.</p></disp-quote><p>Yet it should be noted that the participants most commonly recommended adding blood-flow simulations and improving user interfaces for collaboration, which would help guarantee that the user is able to follow the educational purpose of the app. One suggested:</p><disp-quote><p>Have better CHD models, be more intuitive, and have a set of cut hearts already. Also, adding blood flow through the defects would be very interesting.</p></disp-quote><p>Others advised about the risks of losing face-to-face interactions:</p><disp-quote><p>Needs to guarantee that the student/patient is able to follow, we are losing the face-to-face interaction, and we would not be able to tell if they are following, listening, understanding, etc.</p></disp-quote><p>Most of the common concerns about implementing such VR technology in the educational field were related to technical support, hardware costs, and continued maintenance, as well as educational institutions&#x2019; resources. 
Users displayed reservations about:</p><disp-quote><p>Widespread implementation across places with different financial resources.</p><p>The maintenance of VR systems and the longevity of the technology.</p></disp-quote><p>Most respondents agreed that our CHD VR tool would be most effective in small group sessions, in self-directed learning, or within one-on-one patient interactions.</p></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Principal Findings</title><p>Our findings demonstrate that a VR application integrated with specific CHD models from real CT images is perceived as having high educational value for CHD education, compared to traditional methods, by a group of experts and trainees, consistent with previous literature [<xref ref-type="bibr" rid="ref25">25</xref>,<xref ref-type="bibr" rid="ref31">31</xref>]. While other similar applications exist [<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref31">31</xref>], participants highlighted the integration of CT scans to generate a high-fidelity heart model of great value (9 out of 9 participants, 100%), as well as the ability to perform multiplanar real-time sectioning on the heart models serving as a virtual dissecting platform to overcome physical constraints, such as limited availability of specimens, noted by 3 out of 9 participants (33%).</p></sec><sec id="s4-2"><title>Educational Utility</title><p>Most survey respondents also agreed that small group sessions and an independent learning setting could be the main uses for this application, specifically for medical education training and patient education, which is consistent with previous systematic reviews and meta-analyses on XR and medical education on knowledge improvement [<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref28">28</xref>]. 
Of interest, the most commonly suggested features to consider for future implementations include post-surgical procedure simulations, additional cardiac pathologies, progress tracking and quiz tools, multi-language support, as well as AI integration.</p><p>Thus, our work should contribute to the field of digital medical education by providing a reproducible framework for developing immersive cardiac simulators, while simultaneously addressing technical and pedagogical aspects. Note that, unlike commercial VR solutions, our approaches prioritize didactic customizations, facilitating curricular adaptations. Developed educational VR applications in general offer multiple advantages over traditional learning methods. Here, we discuss the ability to allow multiplayer visualization, real-time interactivity with complex cardiac structures, active and collective learning in a virtual classroom, and increased accessibility. These features make VR a valuable supplement to traditional education methods, potentially helping to reduce stress and anxiety for some students [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref27">27</xref>]; but some may induce undesired side effects.</p></sec><sec id="s4-3"><title>Limitations</title><p>Despite its potential learning advantages, it is important to consider some limitations of VR use, which can cause adverse effects such as visually induced motion sickness or &#x201C;cybersickness&#x201D; that may negatively impact or even limit the uses of these learning experiences for some users [<xref ref-type="bibr" rid="ref29">29</xref>,<xref ref-type="bibr" rid="ref35">35</xref>]. Similar to what was described in other VR educational studies, even though users think that this is a good adjunctive tool that could be used for medical education, they also believe that potential drawbacks for this could be software and hardware maintenance issues. 
Additionally, the technological learning curves can vary from user to user, depending on their initial familiarity with controls and video game usage. Further, for some, implementation costs can also be a factor, including expenses associated with hardware, software, and human resources such as systems engineers, computer scientists, graphic designers, healthcare personnel, and researchers, among others [<xref ref-type="bibr" rid="ref6">6</xref>]. We acknowledge a potential novelty bias associated with VR educational technologies, which may have influenced the educational outcomes of subjects interacting with new technologies for the first time [<xref ref-type="bibr" rid="ref36">36</xref>]. The use of convenience sampling with the participants of different activities at a cardiovascular laboratory means that participants likely had higher knowledge and/or interest in cardiovascular VR applications, which poses one of the greatest limitations of this study. It is important to note that a small sample size limits the statistical power and generalizability of the study.</p></sec><sec id="s4-4"><title>Future Directions</title><p>Furthermore, long-term comparative controlled clinical studies should be designed and performed with our developed VR platform, to assess knowledge retention in students and/or patients with congenital heart diseases. It is also important to consider other ethical implications of using VR models of real patient anatomy in medical education. Additionally, the costs of such technologies could, in part, create educational inequalities in regions with fewer economic resources. 
Finally, it is essential to ensure that students do not develop an excessive dependence on VR, neglecting other essential practical skills such as direct tissue manipulations and/or dissections.</p></sec><sec id="s4-5"><title>Conclusions</title><p>This framework portrays the integration of an innovative VR application with high-fidelity CT scans of Congenital Heart Defects as a potential tool for medical and patient education. Our findings suggest that VR could be easily implemented and adapted by medical educators to bridge the cognitive gap in understanding complex defects. This work also serves as a stepping stone into a more advanced and technological approach for education using newer tools like AI or XR as educational tools for medical curricula. Furthermore, its ongoing development and continuous collaboration between multiple research centers and academic institutions could have a substantial impact on the training of future healthcare professionals and on patient education. The VR platform we described here could help all types of learners better understand CHDs, provide a means for clinicians around the world to discuss complex CHD and treatment approaches, and aid medical device designers relative to future innovations.</p></sec></sec></body><back><ack><p>We want to acknowledge the University of Minnesota - Visible Heart Laboratories and Fundaci&#x00F3;n Cardioinfantil LaCardio and its researchers (Dr. Isabella Casallas Gutierrez and Dr. Alejandro Herrera), for providing data, technology, and human resources that made this study possible.</p><p>The authors declare the use of generative AI (GAI) in the research and writing process. 
According to the GAIDeT (Generative Artificial Intelligence Delegation Taxonomy, 2025), the following tasks were delegated to GAI tools under full human supervision: Proofreading and editing.</p><p>The GAI tool used was: Grammarly.</p><p>Responsibility for the final manuscript lies entirely with the authors.</p><p>GAI tools are not listed as authors and do not bear responsibility for the final outcomes.</p><p>Declaration submitted by: Manuscript writing &#x2013; authors</p></ack><notes><sec><title>Funding</title><p>No external financial support or grants were received from any public, commercial, or not-for-profit entities for the research, authorship, or publication of this article.</p></sec><sec><title>Data Availability</title><p>The data that support the findings of this study are available on request from the corresponding author. The data are not publicly available due to privacy or ethical restrictions.</p></sec></notes><fn-group><fn fn-type="con"><p>KM: Investigation (lead), methodology (lead), writing &#x2013; original draft (lead). DB: software (lead), writing &#x2013; review &#x0026; editing (supporting). CG: conceptualization (equal), Project Administration (lead), Supervision (lead). SN: conceptualization (equal). PAI: conceptualization(equal), Supervision (equal), Funding Acquisition (equal).</p></fn><fn fn-type="conflict"><p>The authors associated with the Visible Heart Laboratory (VHL) declare a non-financial conflict of interest (COI) in that they were directly involved in the design and development of the Virtual Reality application that is the subject of this evaluation study. This COI is being disclosed to ensure transparency regarding the authors&#x2019; foundational role in the technology being assessed. 
All other authors report no financial or non-financial conflicts of interest.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">AR</term><def><p>augmented reality</p></def></def-item><def-item><term id="abb2">CHD</term><def><p>congenital heart defects</p></def></def-item><def-item><term id="abb3">CHD</term><def><p>congenital heart defects</p></def></def-item><def-item><term id="abb4">CT</term><def><p>computed tomography</p></def></def-item><def-item><term id="abb5">GAI</term><def><p>generative artificial intelligence</p></def></def-item><def-item><term id="abb6">VIMS</term><def><p>visually induced motion sickness</p></def></def-item><def-item><term id="abb7">VR</term><def><p>virtual reality</p></def></def-item><def-item><term id="abb8">XR</term><def><p>extended reality</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Li</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Peng</surname><given-names>C</given-names> </name><name name-style="western"><surname>Zhao</surname><given-names>M</given-names> </name><name name-style="western"><surname>He</surname><given-names>Q</given-names> </name></person-group><article-title>A bibliometric analysis of virtual reality in anatomy teaching between 1999 and 2022</article-title><source>Front Educ</source><year>2022</year><month>07</month><day>19</day><volume>7</volume><pub-id pub-id-type="doi">10.3389/feduc.2022.874406</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Baek</surname><given-names>SW</given-names> </name><name 
name-style="western"><surname>Yeo</surname><given-names>T</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>HJ</given-names> </name><etal/></person-group><article-title>Systematic analysis of anatomy virtual reality (VR) apps for advanced education and further applications</article-title><source>Sci Rep</source><year>2024</year><month>12</month><day>30</day><volume>14</volume><issue>1</issue><fpage>31835</fpage><pub-id pub-id-type="doi">10.1038/s41598-024-82945-z</pub-id><pub-id pub-id-type="medline">39738416</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>An</surname><given-names>GJ</given-names> </name></person-group><article-title>An integrative review on cadaver practice among undergraduate nursing students</article-title><source>J Korean Biol Nurs Sci</source><year>2023</year><month>02</month><volume>25</volume><issue>1</issue><fpage>8</fpage><lpage>19</lpage><pub-id pub-id-type="doi">10.7586/jkbns.23.348</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gorbaty</surname><given-names>B</given-names> </name><name name-style="western"><surname>Arango</surname><given-names>S</given-names> </name><name name-style="western"><surname>Buyck</surname><given-names>D</given-names> </name><etal/></person-group><article-title>Virtual reality-based methods for training novice Electrophysiology trainees-a pilot study</article-title><source>J Innov Card Rhythm Manag</source><year>2023</year><month>09</month><volume>14</volume><issue>9</issue><fpage>5583</fpage><lpage>5599</lpage><pub-id pub-id-type="doi">10.19102/icrm.2023.14091</pub-id><pub-id pub-id-type="medline">37781721</pub-id></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Maresky</surname><given-names>HS</given-names> </name><name name-style="western"><surname>Oikonomou</surname><given-names>A</given-names> </name><name name-style="western"><surname>Ali</surname><given-names>I</given-names> </name><name name-style="western"><surname>Ditkofsky</surname><given-names>N</given-names> </name><name name-style="western"><surname>Pakkal</surname><given-names>M</given-names> </name><name name-style="western"><surname>Ballyk</surname><given-names>B</given-names> </name></person-group><article-title>Virtual reality and cardiac anatomy: Exploring immersive three-dimensional cardiac imaging, a pilot study in undergraduate medical anatomy education</article-title><source>Clin Anat</source><year>2019</year><month>03</month><volume>32</volume><issue>2</issue><fpage>238</fpage><lpage>243</lpage><pub-id pub-id-type="doi">10.1002/ca.23292</pub-id><pub-id pub-id-type="medline">30295333</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Buyck</surname><given-names>D</given-names> </name><name name-style="western"><surname>Gherciuc</surname><given-names>S</given-names> </name><name name-style="western"><surname>Gorbaty</surname><given-names>B</given-names> </name><name name-style="western"><surname>Escudero</surname><given-names>EV</given-names> </name><name name-style="western"><surname>Arango</surname><given-names>S</given-names> </name><name name-style="western"><surname>Perry</surname><given-names>TE</given-names> </name><etal/></person-group><article-title>Virtual and augmented realities for cardiac education and device training</article-title><source>In Handbook of Cardiac Anatomy, Physiology, and Devices</source><year>2024</year><edition/><publisher-name>Springer Nature</publisher-name><fpage>967</fpage><lpage>981</lpage><pub-id 
pub-id-type="doi">10.1007/978-3-031-72581-4_48</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Al-Hor</surname><given-names>M</given-names> </name><name name-style="western"><surname>Almahdi</surname><given-names>H</given-names> </name><name name-style="western"><surname>Al-Theyab</surname><given-names>M</given-names> </name><name name-style="western"><surname>Mustafa</surname><given-names>AG</given-names> </name><name name-style="western"><surname>Seed Ahmed</surname><given-names>M</given-names> </name><name name-style="western"><surname>Zaqout</surname><given-names>S</given-names> </name></person-group><article-title>Exploring student perceptions on virtual reality in anatomy education: insights on enjoyment, effectiveness, and preferences</article-title><source>BMC Med Educ</source><year>2024</year><month>12</month><day>2</day><volume>24</volume><issue>1</issue><fpage>1405</fpage><pub-id pub-id-type="doi">10.1186/s12909-024-06370-6</pub-id><pub-id pub-id-type="medline">39623380</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Granger</surname><given-names>NA</given-names> </name></person-group><article-title>Dissection laboratory is vital to medical gross anatomy education</article-title><source>Anat Rec B New Anat</source><year>2004</year><month>11</month><volume>281</volume><issue>1</issue><fpage>6</fpage><lpage>8</lpage><pub-id pub-id-type="doi">10.1002/ar.b.20039</pub-id><pub-id pub-id-type="medline">15558779</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Korf</surname><given-names>HW</given-names> </name><name name-style="western"><surname>Wicht</surname><given-names>H</given-names> 
</name><name name-style="western"><surname>Snipes</surname><given-names>RL</given-names> </name><etal/></person-group><article-title>The dissection course - necessary and indispensable for teaching anatomy to medical students</article-title><source>Ann Anat</source><year>2008</year><volume>190</volume><issue>1</issue><fpage>16</fpage><lpage>22</lpage><pub-id pub-id-type="doi">10.1016/j.aanat.2007.10.001</pub-id><pub-id pub-id-type="medline">18342138</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Parker</surname><given-names>LM</given-names> </name></person-group><article-title>Anatomical dissection: why are we cutting it out? Dissection in undergraduate teaching</article-title><source>ANZ J Surg</source><year>2002</year><month>12</month><volume>72</volume><issue>12</issue><fpage>910</fpage><lpage>912</lpage><pub-id pub-id-type="doi">10.1046/j.1445-2197.2002.02596.x</pub-id><pub-id pub-id-type="medline">12485233</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Evans</surname><given-names>EJ</given-names> </name><name name-style="western"><surname>Fitzgibbon</surname><given-names>GH</given-names> </name></person-group><article-title>The dissecting room: Reactions of first year medical students</article-title><source>Clin Anat</source><year>1992</year><month>01</month><volume>5</volume><issue>4</issue><fpage>311</fpage><lpage>320</lpage><pub-id pub-id-type="doi">10.1002/ca.980050408</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Finkelstein</surname><given-names>P</given-names> </name><name name-style="western"><surname>Mathers</surname><given-names>LH</given-names> 
</name></person-group><article-title>Post&#x2010;traumatic stress among medical students in the anatomy dissection laboratory</article-title><source>Clin Anat</source><year>1990</year><month>01</month><volume>3</volume><issue>3</issue><fpage>219</fpage><lpage>226</lpage><pub-id pub-id-type="doi">10.1002/ca.980030308</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Curran</surname><given-names>VR</given-names> </name><name name-style="western"><surname>Xu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Aydin</surname><given-names>MY</given-names> </name><name name-style="western"><surname>Meruvia-Pastor</surname><given-names>O</given-names> </name></person-group><article-title>Use of extended reality in medical education: an integrative review</article-title><source>Med Sci Educ</source><year>2023</year><month>02</month><volume>33</volume><issue>1</issue><fpage>275</fpage><lpage>286</lpage><pub-id pub-id-type="doi">10.1007/s40670-022-01698-4</pub-id><pub-id pub-id-type="medline">36569366</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Logeswaran</surname><given-names>A</given-names> </name><name name-style="western"><surname>Munsch</surname><given-names>C</given-names> </name><name name-style="western"><surname>Chong</surname><given-names>YJ</given-names> </name><name name-style="western"><surname>Ralph</surname><given-names>N</given-names> </name><name name-style="western"><surname>McCrossnan</surname><given-names>J</given-names> </name></person-group><article-title>The role of extended reality technology in healthcare education: Towards a learner-centred approach</article-title><source>Future Healthc 
J</source><year>2021</year><month>03</month><volume>8</volume><issue>1</issue><fpage>e79</fpage><lpage>e84</lpage><pub-id pub-id-type="doi">10.7861/fhj.2020-0112</pub-id><pub-id pub-id-type="medline">33791482</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kye</surname><given-names>B</given-names> </name><name name-style="western"><surname>Han</surname><given-names>N</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>E</given-names> </name><name name-style="western"><surname>Park</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Jo</surname><given-names>S</given-names> </name></person-group><article-title>Educational applications of metaverse: possibilities and limitations</article-title><source>J Educ Eval Health Prof</source><year>2021</year><volume>18</volume><fpage>32</fpage><pub-id pub-id-type="doi">10.3352/jeehp.2021.18.32</pub-id><pub-id pub-id-type="medline">34897242</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Han</surname><given-names>HW</given-names> </name></person-group><article-title>A study on typology of virtual world and its development in metaverse</article-title><source>J Digit Contents Soc</source><year>2008</year><volume>9</volume><issue>2</issue><fpage>317</fpage><lpage>323</lpage><pub-id pub-id-type="medline">34897242</pub-id></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Milgram</surname><given-names>P</given-names> </name><name name-style="western"><surname>Kishino</surname><given-names>F</given-names> </name></person-group><article-title>A taxonomy of mixed reality visual 
displays</article-title><source>IEICE Trans Information Systems</source><year>1994</year><access-date>2025-02-04</access-date><volume>E77-D</volume><issue>12</issue><fpage>1321</fpage><lpage>1329</lpage><comment><ext-link ext-link-type="uri" xlink:href="https://search.ieice.org/bin/summary.php?id=e77-d_12_1321">https://search.ieice.org/bin/summary.php?id=e77-d_12_1321</ext-link></comment></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Bailenson</surname><given-names>JN</given-names> </name></person-group><article-title>Virtual reality</article-title><source>Open Encyclopedia of Cognitive Science</source><year>2024</year><month>07</month><day>24</day><access-date>2025-02-04</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://oecs.mit.edu/pub/2vci5sg1/release/1">https://oecs.mit.edu/pub/2vci5sg1/release/1</ext-link></comment></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Son</surname><given-names>H</given-names> </name><name name-style="western"><surname>Ross</surname><given-names>A</given-names> </name><name name-style="western"><surname>Mendoza-Tirado</surname><given-names>E</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>LJ</given-names> </name></person-group><article-title>Virtual reality in clinical practice and research: viewpoint on novel applications for nursing</article-title><source>JMIR Nurs</source><year>2022</year><month>03</month><day>16</day><volume>5</volume><issue>1</issue><fpage>e34036</fpage><pub-id pub-id-type="doi">10.2196/34036</pub-id><pub-id pub-id-type="medline">35293870</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>van der 
Kruk</surname><given-names>SR</given-names> </name><name name-style="western"><surname>Zielinski</surname><given-names>R</given-names> </name><name name-style="western"><surname>MacDougall</surname><given-names>H</given-names> </name><name name-style="western"><surname>Hughes-Barton</surname><given-names>D</given-names> </name><name name-style="western"><surname>Gunn</surname><given-names>KM</given-names> </name></person-group><article-title>Virtual reality as a patient education tool in healthcare: A scoping review</article-title><source>Patient Educ Couns</source><year>2022</year><month>07</month><volume>105</volume><issue>7</issue><fpage>1928</fpage><lpage>1942</lpage><pub-id pub-id-type="doi">10.1016/j.pec.2022.02.005</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Arango</surname><given-names>S</given-names> </name><name name-style="western"><surname>Gorbaty</surname><given-names>B</given-names> </name><name name-style="western"><surname>Buyck</surname><given-names>D</given-names> </name><etal/></person-group><article-title>A free-access online interactive simulator to enhance perioperative transesophageal echocardiography training using a high-fidelity human heart 3D model</article-title><source>J Cardiothorac Vasc Anesth</source><year>2023</year><month>02</month><volume>37</volume><issue>2</issue><fpage>308</fpage><lpage>313</lpage><pub-id pub-id-type="doi">10.1053/j.jvca.2022.10.012</pub-id><pub-id pub-id-type="medline">36372718</pub-id></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Iaizzo</surname><given-names>P</given-names> </name></person-group><source>Free Educational Tools</source><year>2025</year><access-date>2025-11-27</access-date><publisher-name>University of Minnesota</publisher-name><comment><ext-link 
ext-link-type="uri" xlink:href="https://med.umn.edu/vhlab/research/resources">https://med.umn.edu/vhlab/research/resources</ext-link></comment></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Iaizzo</surname><given-names>PA</given-names> </name></person-group><source>Handbook of Cardiac Anatomy, Physiology, and Devices</source><year>2024</year><edition/><publisher-name>Springer Nature</publisher-name><fpage>1009</fpage><pub-id pub-id-type="doi">10.1007/978-3-031-72581-4</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Minouei</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Omid</surname><given-names>A</given-names> </name><name name-style="western"><surname>Mirzaie</surname><given-names>A</given-names> </name><name name-style="western"><surname>Mahdavifard</surname><given-names>H</given-names> </name><name name-style="western"><surname>Rahimi</surname><given-names>A</given-names> </name></person-group><article-title>Effectiveness of virtual reality on medical students&#x2019; academic achievement in anatomy: systematic review</article-title><source>BMC Med Educ</source><year>2024</year><month>12</month><day>2</day><volume>24</volume><issue>1</issue><fpage>1407</fpage><pub-id pub-id-type="doi">10.1186/s12909-024-06402-1</pub-id><pub-id pub-id-type="medline">39623447</pub-id></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Liu</surname><given-names>K</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>W</given-names> </name><name name-style="western"><surname>Li</surname><given-names>W</given-names> </name><name 
name-style="western"><surname>Wang</surname><given-names>T</given-names> </name><name name-style="western"><surname>Zheng</surname><given-names>Y</given-names> </name></person-group><article-title>Effectiveness of virtual reality in nursing education: a systematic review and meta-analysis</article-title><source>BMC Med Educ</source><year>2023</year><month>09</month><day>28</day><volume>23</volume><issue>1</issue><fpage>710</fpage><pub-id pub-id-type="doi">10.1186/s12909-023-04662-x</pub-id><pub-id pub-id-type="medline">37770884</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chauhan</surname><given-names>P</given-names> </name><name name-style="western"><surname>Mehra</surname><given-names>S</given-names> </name><name name-style="western"><surname>Pandya</surname><given-names>A</given-names> </name></person-group><article-title>Randomised controlled trial: role of virtual interactive 3-dimensional models in anatomical and medical education</article-title><source>J Vis Commun Med</source><year>2024</year><month>01</month><volume>47</volume><issue>1</issue><fpage>39</fpage><lpage>45</lpage><pub-id pub-id-type="doi">10.1080/17453054.2024.2352404</pub-id><pub-id pub-id-type="medline">38767329</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Garc&#x00ED;a-Robles</surname><given-names>P</given-names> </name><name name-style="western"><surname>Cort&#x00E9;s-P&#x00E9;rez</surname><given-names>I</given-names> </name><name name-style="western"><surname>Nieto-Esc&#x00E1;mez</surname><given-names>FA</given-names> </name><name name-style="western"><surname>Garc&#x00ED;a-L&#x00F3;pez</surname><given-names>H</given-names> </name><name name-style="western"><surname>Obrero-Gait&#x00E1;n</surname><given-names>E</given-names> 
</name><name name-style="western"><surname>Osuna-P&#x00E9;rez</surname><given-names>MC</given-names> </name></person-group><article-title>Immersive virtual reality and augmented reality in anatomy education: A systematic review and meta-analysis</article-title><source>Anat Sci Educ</source><year>2024</year><volume>17</volume><issue>3</issue><fpage>514</fpage><lpage>528</lpage><pub-id pub-id-type="doi">10.1002/ase.2397</pub-id><pub-id pub-id-type="medline">38344900</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Salimi</surname><given-names>S</given-names> </name><name name-style="western"><surname>Asgari</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Mohammadnejad</surname><given-names>A</given-names> </name><name name-style="western"><surname>Teimazi</surname><given-names>A</given-names> </name><name name-style="western"><surname>Bakhtiari</surname><given-names>M</given-names> </name></person-group><article-title>Efficacy of virtual reality and augmented reality in anatomy education: A systematic review and meta-analysis</article-title><source>Anat Sci Educ</source><year>2024</year><month>12</month><volume>17</volume><issue>9</issue><fpage>1668</fpage><lpage>1685</lpage><pub-id pub-id-type="doi">10.1002/ase.2501</pub-id><pub-id pub-id-type="medline">39300601</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Liao</surname><given-names>ML</given-names> </name><name name-style="western"><surname>Yeh</surname><given-names>CC</given-names> </name><name name-style="western"><surname>Lue</surname><given-names>JH</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>MF</given-names> </name></person-group><article-title>Implementing virtual reality 
technology to teach medical college systemic anatomy: A pilot study</article-title><source>Anat Sci Educ</source><year>2024</year><month>06</month><volume>17</volume><issue>4</issue><fpage>796</fpage><lpage>805</lpage><pub-id pub-id-type="doi">10.1002/ase.2407</pub-id><pub-id pub-id-type="medline">38487974</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="thesis"><person-group person-group-type="author"><name name-style="western"><surname>Odame</surname><given-names>A</given-names> </name></person-group><article-title>Virtual reality as a teaching tool in cardiac anatomy education</article-title><year>2021</year><access-date>2025-11-27</access-date><publisher-name>K&#x00F6;then, Germany: Anhalt University of Applied Sciences</publisher-name><comment><ext-link ext-link-type="uri" xlink:href="https://www.hs-anhalt.de/fileadmin/Dateien/FB6/personen/tuemler_j/Master_Thesis_Angela_Odame.pdf">https://www.hs-anhalt.de/fileadmin/Dateien/FB6/personen/tuemler_j/Master_Thesis_Angela_Odame.pdf</ext-link></comment></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Fair&#x00E9;n</surname><given-names>M</given-names> </name><name name-style="western"><surname>Moy&#x00E9;s</surname><given-names>J</given-names> </name><name name-style="western"><surname>Insa</surname><given-names>E</given-names> </name></person-group><article-title>VR4Health: personalized teaching and learning anatomy using VR</article-title><source>J Med Syst</source><year>2020</year><month>03</month><day>19</day><volume>44</volume><issue>5</issue><fpage>94</fpage><pub-id pub-id-type="doi">10.1007/s10916-020-01550-5</pub-id><pub-id pub-id-type="medline">32193612</pub-id></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name 
name-style="western"><surname>Chheang</surname><given-names>V</given-names> </name><name name-style="western"><surname>Sharmin</surname><given-names>S</given-names> </name><name name-style="western"><surname>Marquez-Hernandez</surname><given-names>R</given-names> </name><name name-style="western"><surname>Patel</surname><given-names>M</given-names> </name><name name-style="western"><surname>Rajasekaran</surname><given-names>D</given-names> </name><name name-style="western"><surname>Caulfield</surname><given-names>G</given-names> </name><etal/></person-group><article-title>Towards anatomy education with generative AI-based virtual assistants in immersive virtual reality environments</article-title><source>arXiv</source><comment>Preprint posted online on Jun 29, 2023</comment><pub-id pub-id-type="doi">10.48550/arXiv.2306.17278</pub-id></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Brooke</surname><given-names>J</given-names> </name></person-group><source>System Usability Scale (SUS)</source><access-date>2025-11-27</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.usability.gov/how-to-and-tools/methods/system-usability-scale.html">https://www.usability.gov/how-to-and-tools/methods/system-usability-scale.html</ext-link></comment></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tcha-Tokey</surname><given-names>K</given-names> </name><name name-style="western"><surname>Christmann</surname><given-names>O</given-names> </name><name name-style="western"><surname>Loup-Escande</surname><given-names>E</given-names> </name><name name-style="western"><surname>Richir</surname><given-names>S</given-names> </name></person-group><article-title>Proposition and validation of a questionnaire to measure the user experience 
in immersive virtual environments</article-title><source>IJVR</source><year>2016</year><volume>16</volume><issue>1</issue><fpage>33</fpage><lpage>48</lpage><pub-id pub-id-type="doi">10.20870/IJVR.2016.16.1.2880</pub-id></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sim&#x00F3;n-Vicente</surname><given-names>L</given-names> </name><name name-style="western"><surname>Rodr&#x00ED;guez-Cano</surname><given-names>S</given-names> </name><name name-style="western"><surname>Delgado-Benito</surname><given-names>V</given-names> </name><name name-style="western"><surname>Aus&#x00ED;n-Villaverde</surname><given-names>V</given-names> </name><name name-style="western"><surname>Cubo Delgado</surname><given-names>E</given-names> </name></person-group><article-title>Cybersickness. A systematic literature review of adverse effects related to virtual reality</article-title><source>Neurologia (Engl Ed)</source><year>2024</year><month>10</month><volume>39</volume><issue>8</issue><fpage>701</fpage><lpage>709</lpage><pub-id pub-id-type="doi">10.1016/j.nrleng.2022.04.007</pub-id><pub-id pub-id-type="medline">39396266</pub-id></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lee</surname><given-names>J</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>CC</given-names> </name><name name-style="western"><surname>Basu</surname><given-names>A</given-names> </name></person-group><article-title>From novelty to knowledge: a longitudinal investigation of the novelty effect on learning outcomes in virtual reality</article-title><source>IEEE Trans Vis Comput Graph</source><year>2025</year><month>05</month><volume>31</volume><issue>5</issue><fpage>3204</fpage><lpage>3212</lpage><pub-id 
pub-id-type="doi">10.1109/TVCG.2025.3549897</pub-id><pub-id pub-id-type="medline">40063496</pub-id></nlm-citation></ref></ref-list><app-group><supplementary-material id="app1"><label>Multimedia Appendix 1</label><p>Information sheet.</p><media xlink:href="xr_v2i1e74429_app1.docx" xlink:title="DOCX File, 17 KB"/></supplementary-material><supplementary-material id="app2"><label>Multimedia Appendix 2</label><p>Implementation survey.</p><media xlink:href="xr_v2i1e74429_app2.docx" xlink:title="DOCX File, 19 KB"/></supplementary-material><supplementary-material id="app3"><label>Multimedia Appendix 3</label><p>Virtual Reality app implementation survey (Likert Style Table).</p><media xlink:href="xr_v2i1e74429_app3.xlsx" xlink:title="XLSX File, 73 KB"/></supplementary-material></app-group></back></article>