@inproceedings{b7ec6d994da34999a73224ee6f6ad71e,
  author    = {Rajbhandari, Yaju and Hayes, Barry},
  title     = {Energy Storage Scheduling for Cost Minimization Using Deep {Q-Learning}},
  abstract  = {This paper presents a Discrete-Action Deep Q-Network (DQN) approach for scheduling Energy Storage Systems (ESS) to optimize energy costs for residential households. The proposed model normalizes the state based on the maximum allowable actions defined by the policy, enabling effective state representation and decision-making. Simulation results validate the superiority of the proposed method, which achieves cost savings of 43\% compared to a baseline with no rule-based strategy and 21\% more than the average rule-based approach. Additionally, the proposed discrete-action DQN approach demonstrates an 11\% improvement in cost efficiency over a three-level scheduling DQN method, highlighting the benefits of action discretization. These findings underscore the potential of the proposed approach to enhance energy management in smart grids, effectively optimizing storage system contributions for residential households while significantly reducing energy costs.},
  keywords  = {Deep Q-Learning, Energy Management, Optimization, Reinforcement Learning},
  booktitle = {2025 {IEEE} Kiel PowerTech, PowerTech 2025},
  series    = {2025 {IEEE} Kiel PowerTech, PowerTech 2025},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  year      = {2025},
  doi       = {10.1109/PowerTech59965.2025.11180258},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2025 IEEE.; 2025 IEEE Kiel PowerTech, PowerTech 2025 ; Conference date: 29-06-2025 Through 03-07-2025},
  address   = {United States},
}