@inproceedings{4a9bc16a843546639a6f6eaa9afd6983,
title = "Fine-tuning of LLMs for HeXie Management Theory",
abstract = "HeXie Management Theory (HXMT) has been used in many applications. Those applications have demonstrated the effectiveness of the theory in responding to management challenges by integrating oriental and occidental wisdom. With its adoption's complexity, dynamics, and flexibility, a revolutionary method needs to be developed to simplify it more broadly. Large Language Models (LLMs) have shown their compelling ability to generate human-like content with their chat-based paradigm. Many specifically trained LLMs have demonstrated success in their dedicated application domains. This paper reports the study of fine-tuning LLMs using a specialized dataset derived from the HeXie management theory. Two models were built on Baidu's Qianfan platform and were adapted for Chinese text. Four criteria were used to evaluate the performances. This study provides an example of fine-tuning LLMs for a Chinese text-based specific theory and building a domain-specific intelligent agent using LLMs or HXMT, which is available at https://alex17swim.com/chat/",
keywords = "Fine-tune, HeXie Management Theory, Large Language Models, RAG",
author = "Lisirui Tang and Chengyu Wang and Gangmin Li and Peng Liu",
note = "Publisher Copyright: {\textcopyright} 2024 ACM.; 6th International Conference on Pattern Recognition and Intelligent Systems, PRIS 2024 ; Conference date: 26-07-2024",
year = "2024",
month = oct,
day = "3",
doi = "10.1145/3689218.3689235",
language = "English",
series = "ACM International Conference Proceeding Series",
publisher = "Association for Computing Machinery",
pages = "69--73",
editor = "Wenbing Zhao and Yonghong Peng and Yulin Wang",
booktitle = "Proceedings - 2024 6th International Conference on Pattern Recognition and Intelligent Systems, PRIS 2024",
}