@inproceedings{066fa59c93264e35990cf8eab64a6c13,
title = "Unleashing the Retrieval Potential of Large Language Models in Conversational Recommender Systems",
abstract = "Conversational recommender systems (CRSs) aim to capture user preferences and provide personalized recommendations through interactive natural language dialogue. The recent advent of large language models (LLMs) has revolutionized human engagement in natural conversation, driven by their extensive world knowledge and remarkable natural language understanding and generation capabilities. However, introducing LLMs into CRSs presents new technical challenges. Directly prompting LLMs for recommendation generation requires understanding a large and evolving item corpus, as well as grounding the generated recommendations in the real item space. On the other hand, generating recommendations based on external recommendation engines or directly integrating their suggestions into responses may constrain the overall performance of LLMs, since these engines generally have inferior representation abilities compared to LLMs. To address these challenges, we propose an end-to-end large-scale CRS model, named ReFICR, a novel LLM-enhanced conversational recommender that empowers a retrievable large language model to perform conversational recommendation by following retrieval and generation instructions through lightweight tuning. By decomposing the complex CRS task into multiple subtasks, we formulate these subtasks into two types of instruction formats: retrieval and generation. The hidden states of ReFICR are utilized to generate text embeddings for retrieval, and ReFICR is simultaneously fine-tuned to handle the generation subtasks. We optimize a contrastive objective to enhance the text embeddings for retrieval and jointly optimize the language modeling objective for generation. Our experimental results on public datasets demonstrate that ReFICR significantly outperforms baselines in terms of recommendation accuracy and response quality. Our code is publicly available at https://github.com/yt556677/ReFICR.",
keywords = "Conversational Recommender Systems, Instruction Tuning, Retrievable Large Language Models",
author = "Ting Yang and Li Chen",
note = "Funding Information: This work is supported by Hong Kong Baptist University IG-FNRA project (RC-FNRA-IG/21-22/SCI/01) and Key Research Partnership Scheme (KRPS/23-24/02). Publisher copyright: {\textcopyright} 2024 Copyright held by the owner/author(s).",
year = "2024",
month = oct,
day = "8",
doi = "10.1145/3640457.3688146",
language = "English",
isbn = "9798400705052",
series = "RecSys '24",
publisher = "Association for Computing Machinery (ACM)",
pages = "43--52",
editor = "{Di Noia}, Tommaso and Pasquale Lops and Thorsten Joachims and Katrien Verbert and Pablo Castells and Zhenhua Dong and Ben London",
booktitle = "RecSys '24: Proceedings of the 18th ACM Conference on Recommender Systems",
address = "United States",
}