@inproceedings{556c3d202d2e44aa91eadaf5905a4916,
title = "Human video textures",
abstract = "This paper describes a data-driven approach for generating photorealistic animations of human motion. Each animation sequence follows a user-choreographed path and plays continuously by seamlessly transitioning between different segments of the captured data. To produce these animations, we capitalize on the complementary characteristics of motion capture data and video. We customize our capture system to record motion capture data that are synchronized with our video source. Candidate transition points in video clips are identified using a new similarity metric based on 3-D marker trajectories and their 2-D projections into video. Once the transitions have been identified, a video-based motion graph is constructed. We further exploit hybrid motion and video data to ensure that the transitions are seamless when generating animations. Motion capture marker projections serve as control points for segmentation of layers and nonrigid transformation of regions. This allows warping and blending to generate seamless in-between frames for animation. We show a series of choreographed animations of walks and martial arts scenes as validation of our approach.",
keywords = "Image-based rendering, Layered motion, Motion capture",
author = "Matthew Flagg and Atsushi Nakazawa and Qiushuang Zhang and Sing Bing Kang and Young Kee Ryu and Irfan Essa and James M. Rehg",
year = "2009",
doi = "10.1145/1507149.1507182",
language = "English (US)",
isbn = "9781605584294",
booktitle = "Proceedings of I3D 2009: The 2009 ACM SIGGRAPH Symposium on Interactive 3D Graphics and Games",
pages = "199--206",
note = "2009 ACM SIGGRAPH Symposium on Interactive 3D Graphics and Games, I3D 2009; Conference date: 27-02-2009 through 01-03-2009",
}