@inproceedings{43083d557a2149cc82205008695c848d,
  author    = {Li, Haoran and Serrano, Diego and Wang, Shukai and Guillod, Thomas and Luo, Min and Chen, Minjie},
  title     = {Predicting the {B-H} Loops of Power Magnetics with {Transformer-based} Encoder-Projector-Decoder Neural Network Architecture},
  booktitle = {APEC 2023 - 38th Annual {IEEE} Applied Power Electronics Conference and Exposition},
  series    = {Conference Proceedings - IEEE Applied Power Electronics Conference and Exposition - APEC},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  address   = {United States},
  pages     = {1543--1550},
  year      = {2023},
  doi       = {10.1109/APEC43580.2023.10131497},
  language  = {English (US)},
  keywords  = {data-driven method, hysteresis loop, machine learning, neural network, power magnetics, transformer},
  abstract  = {This paper presents a transformer-based encoder-projector-decoder neural network architecture for modeling power magnetics B-H hysteresis loops. The transformer-based encoder-decoder network architecture maps a flux density excitation waveform (B) into the corresponding magnetic field strength (H) waveform. The predicted B-H loop can be used to estimate the core loss and support magnetics-in-circuit simulations. A projector is added between the transformer encoder and decoder to capture the impact of other inputs such as frequency, temperature, and dc bias. An example transformer neural network is designed, trained, and tested to prove the effectiveness of the proposed architecture.},
  note      = {Publisher Copyright: {\textcopyright} 2023 IEEE.; 38th Annual IEEE Applied Power Electronics Conference and Exposition, APEC 2023 ; Conference date: 19-03-2023 Through 23-03-2023},
}