@misc{c43a994faaef4f2f96e2fb9a05e19c68,
  title    = {A Hybrid Reinforcement Learning-{MPC} Approach for Distribution System Critical Load Restoration},
  abstract = {This paper proposes a hybrid control approach for distribution system critical load restoration, combining deep reinforcement learning (RL) and model predictive control (MPC) aiming at maximizing total restored load following an extreme event. RL determines a policy for quantifying operating reserve requirements, thereby hedging against uncertainty, while MPC models grid operations incorporating RL policy actions (i.e., reserve requirements), renewable (wind and solar) power predictions, and load demand forecasts. We formulate the reserve requirement determination problem as a sequential decision-making problem based on the Markov Decision Process (MDP) and design an RL learning environment based on the OpenAI Gym framework and MPC simulation. The RL agent reward and MPC objective function aim to maximize and monotonically increase total restored load and minimize load shedding and renewable power curtailment. The RL algorithm is trained offline using a historical forecast of renewable generation and load demand. The method is tested using a modified IEEE 13-bus distribution test feeder containing wind turbine, photovoltaic, microturbine, and battery. Case studies demonstrated that the proposed method outperforms other policies with static operating reserves.},
  keywords = {distribution system, model predictive control, operating reserve, reinforcement learning, restoration},
  author   = {Eseye, Abinet and Zhang, Xiangyu and Knueven, Bernard and Reynolds, Matthew and Liu, Weijia and Jones, Wesley},
  year     = {2022},
  language = {American English},
  note     = {Presented at the 2022 IEEE Power \& Energy Society General Meeting, 17--21 July 2022, Denver, Colorado},
  type     = {Other},
}