@inproceedings{e506274ade104014a6063865040798f1,
title = "Adaptive power management using reinforcement learning",
abstract = "System level power management must consider the uncertainty and variability that comes from the environment, the application and the hardware. A robust power management technique must be able to learn the optimal decision from past history and improve itself as the environment changes. This paper presents a novel online power management technique based on model-free constrained reinforcement learning (RL). It learns the best power management policy that gives the minimum power consumption for a given performance constraint without any prior information of workload. Compared with existing machine learning based power management techniques, the RL based learning is capable of exploring the trade-off in the power-performance design space and converging to a better power management policy. Experimental results show that the proposed RL based power management achieves 24% and 3% reduction in power and latency respectively comparing to the existing expert based power management.",
keywords = "Model-free, Power management, Q-learning, Reinforcement learning",
author = "Ying Tan and Wei Liu and Qinru Qiu",
year = "2009",
doi = "10.1145/1687399.1687486",
language = "English (US)",
isbn = "9781605588001",
series = "IEEE/ACM International Conference on Computer-Aided Design, Digest of Technical Papers, ICCAD",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "461--467",
booktitle = "Proceedings of the 2009 IEEE/ACM International Conference on Computer-Aided Design - Digest of Technical Papers, ICCAD 2009",
note = "2009 IEEE/ACM International Conference on Computer-Aided Design, ICCAD 2009 ; Conference date: 02-11-2009 Through 05-11-2009",
}