@inproceedings{d0192d4bc002483d83d9a29dc76d9cef,
title = "Fine-Tuning Generative Pre-Trained Transformers for Clinical Dialogue Summarization",
abstract = "Automated clinical dialogue summarization can help make health professional workflows more efficient. With the advent of large language models, machine learning can be used to provide accurate and efficient summarization tools. Generative Pre-Trained Transformers (GPT) have shown great promise in this area. While larger GPT models, such as GPT-4, have been used, these models pose their own problems in terms of precision and expense. Fine-tuning smaller models can lead to more accurate results with less computational expense. In this paper, we fine-tune a GPT-3.5 model to summarize clinical dialogue. We use both default and manually tuned hyperparameters for comparison purposes. We also compare our default model to past work using ROUGE-1, ROUGE-2, ROUGE-L, and BERTScore. We find our model outperforms GPT-4 across all measures. As our fine-tuning process is based on the smaller GPT-3.5 model, we show that fine-tuning leads to more accurate and less expensive results. Informal human observation also reveals our notes to be of acceptable quality.",
keywords = "Data augmentation, Machine translation, Parameter tuning, Synthetic data generation, Transformers",
author = "Isabel Ronan and Sabin Tabirca",
note = "Publisher Copyright: {\textcopyright} 2024 IEEE; 2024 International Conference on Frontiers of Information Technology, FIT 2024; Conference date: 09-12-2024 through 10-12-2024",
year = "2024",
doi = "10.1109/FIT63703.2024.10838420",
language = "English",
series = "2024 International Conference on Frontiers of Information Technology, FIT 2024",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
booktitle = "2024 International Conference on Frontiers of Information Technology, FIT 2024",
address = "United States",
}