by Alexander, Oleg, Fyffe, Graham, Busch, Jay, Yu, Xueming, Ichikari, Ryosuke, Jones, Andrew, Debevec, Paul, Jimenez, Jorge, Danvoye, Etienne, Antionazzi, Bernardo, Eheler, Mike, Kysela, Zybnek and von der Pahlen, Javier
Abstract:
In 2008, the "Digital Emily" project [Alexander et al. 2009] showed how a set of high-resolution facial expressions scanned in a light stage could be rigged into a real-time photoreal digital character and driven with video-based facial animation techniques. However, Digital Emily was rendered offline, involved just the front of the face, and was never seen in a tight closeup. In this collaboration between Activision and USC ICT shown at SIGGRAPH 2013's Real-Time Live venue, we endeavoured to create a real-time, photoreal digital human character which could be seen from any viewpoint, in any lighting, and could perform realistically from video performance capture even in a tight closeup. In addition, we wanted this to run in a real-time game-ready production pipeline, ultimately achieving 180 frames per second for a full-screen character on a two-year old graphics card.
Reference:
Digital Ira: Creating a Real-Time Photoreal Digital Actor (Alexander, Oleg, Fyffe, Graham, Busch, Jay, Yu, Xueming, Ichikari, Ryosuke, Jones, Andrew, Debevec, Paul, Jimenez, Jorge, Danvoye, Etienne, Antionazzi, Bernardo, Eheler, Mike, Kysela, Zybnek and von der Pahlen, Javier), In SIGGRAPH Real Time Live!, 2013.
Bibtex Entry:
@inproceedings{alexander_digital_2013,
  address    = {Anaheim, CA},
  title      = {Digital {Ira}: Creating a Real-Time Photoreal Digital Actor},
  isbn       = {978-1-4503-2342-0},
  shorttitle = {Digital {Ira}},
  url        = {http://dl.acm.org/citation.cfm?doid=2503385.2503387},
  doi        = {10.1145/2503385.2503387},
  abstract   = {In 2008, the "Digital Emily" project [Alexander et al. 2009] showed how a set of high-resolution facial expressions scanned in a light stage could be rigged into a real-time photoreal digital character and driven with video-based facial animation techniques. However, Digital Emily was rendered offline, involved just the front of the face, and was never seen in a tight closeup. In this collaboration between Activision and USC ICT shown at SIGGRAPH 2013's Real-Time Live venue, we endeavoured to create a real-time, photoreal digital human character which could be seen from any viewpoint, in any lighting, and could perform realistically from video performance capture even in a tight closeup. In addition, we wanted this to run in a real-time game-ready production pipeline, ultimately achieving 180 frames per second for a full-screen character on a two-year old graphics card.},
  booktitle  = {{SIGGRAPH} Real Time Live!},
  author     = {Alexander, Oleg and Fyffe, Graham and Busch, Jay and Yu, Xueming and Ichikari, Ryosuke and Jones, Andrew and Debevec, Paul and Jimenez, Jorge and Danvoye, Etienne and Antionazzi, Bernardo and Eheler, Mike and Kysela, Zybnek and von der Pahlen, Javier},
  month      = jul,
  year       = {2013},
  keywords   = {Graphics, UARC},
}