% Changed @unpublished -> @techreport: @unpublished requires a `note` field
% (missing here, causing a BibTeX warning) and silently ignores `institution`,
% whereas @techreport's required fields (author, title, institution, year) are
% all present. Author names converted to unambiguous "Last, First" form so the
% compound surname "Durall Lopez" is not mis-parsed as Last="Lopez".
@techreport{DurallLopezPfreundtKeuper2020,
  author      = {Durall Lopez, Ricard and Pfreundt, Franz-Josef and Keuper, Janis},
  title       = {Local Facial Attribute Transfer through Inpainting},
  institution = {Fakult{\"a}t Elektrotechnik, Medizintechnik und Informatik (EMI) (ab 04/2019)},
  pages       = {8},
  year        = {2020},
  abstract    = {The term attribute transfer refers to the tasks of altering images in such a way, that the semantic interpretation of a given input image is shifted towards an intended direction, which is quantified by semantic attributes. Prominent example applications are photo realistic changes of facial features and expressions, like changing the hair color, adding a smile, enlarging the nose or altering the entire context of a scene, like transforming a summer landscape into a winter panorama. Recent advances in attribute transfer are mostly based on generative deep neural networks, using various techniques to manipulate images in the latent space of the generator. In this paper, we present a novel method for the common sub-task of local attribute transfers, where only parts of a face have to be altered in order to achieve semantic changes (e.g. removing a mustache). In contrast to previous methods, where such local changes have been implemented by generating new (global) images, we propose to formulate local attribute transfers as an inpainting problem. Removing and regenerating only parts of images, our Attribute Transfer Inpainting Generative Adversarial Network (ATI-GAN) is able to utilize local context information to focus on the attributes while keeping the background unmodified resulting in visually sound results.},
  language    = {en},
}