@inproceedings{8bc73c7a95194318bc03d1522ebc03b3,
  title     = {Decentralized Federated Large Model Tuning over Edge Networks with Zeroth-Order Optimization},
  abstract  = {Federated tuning of large models is an emerging paradigm that pushes the promising generative AI services into the network edge. However, as model sizes scale up, the conflicts between their intensive resource demands and the naturally limited resource at edge networks significantly constrained the performance of tuning large models over edge networks. In view of these, we propose a fully decentralized federated large model tuning framework with zeroth-order (ZO) optimization, addressing the significant computation and communication costs during the federated learning (FL) process. The proposed framework offers superior performance in dynamic and infrastructure-less edge networks with a theoretical convergence guarantee. Extensive experiments demonstrate the efficacy and outperformance of the proposed framework regarding the communication efficiency and robustness performance.},
  keywords  = {communication efficiency, decentralized learning, federated learning, large model tuning, zeroth-order optimization},
  author    = {Chen, Zihan and Park, Jihong and He, Boxiang and Yuan, Yanli and Yang, Howard H.},
  note      = {Publisher Copyright: {\textcopyright} 2025 IEEE.; 2025 IEEE/CIC International Conference on Communications in China, ICCC 2025; Conference date: 10-08-2025 Through 13-08-2025},
  year      = {2025},
  doi       = {10.1109/ICCC65529.2025.11148609},
  language  = {English},
  series    = {2025 IEEE/CIC International Conference on Communications in China: Shaping the Future of Integrated Connectivity, ICCC 2025},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  booktitle = {2025 IEEE/CIC International Conference on Communications in China: Shaping the Future of Integrated Connectivity, ICCC 2025},
  address   = {United States},
}