@unpublished{DurallLopezChatzimichailidisLabusetal.2020,
  author      = {Durall Lopez, Ricard and Chatzimichailidis, Avraam and Labus, Peter and Keuper, Janis},
  title       = {Combating Mode Collapse in {GAN} training: An Empirical Analysis using {Hessian} Eigenvalues},
  institution = {Fakult{\"a}t Elektrotechnik, Medizintechnik und Informatik (EMI) (ab 04/2019)},
  pages       = {9},
  year        = {2020},
  note        = {Preprint},
  abstract    = {Generative adversarial networks (GANs) provide state-of-the-art results in image generation. However, despite being so powerful, they still remain very challenging to train. This is in particular caused by their highly non-convex optimization space leading to a number of instabilities. Among them, mode collapse stands out as one of the most daunting ones. This undesirable event occurs when the model can only fit a few modes of the data distribution, while ignoring the majority of them. In this work, we combat mode collapse using second-order gradient information. To do so, we analyse the loss surface through its Hessian eigenvalues, and show that mode collapse is related to the convergence towards sharp minima. In particular, we observe how the eigenvalues of the G are directly correlated with the occurrence of mode collapse. Finally, motivated by these findings, we design a new optimization algorithm called nudged-Adam (NuGAN) that uses spectral information to overcome mode collapse, leading to empirically more stable convergence properties.},
  language    = {en},
}