% This file was created with JabRef 2.3.1.
% Encoding: UTF-8
%
% Conventions used in this file:
%  - entry types lowercase; @inproceedings preferred over the @conference alias
%  - author names in unambiguous "Last, First" form, separated by " and "
%  - month given via the standard unquoted BibTeX macros (jan..dec)
%  - page ranges use the en-dash "--"
%  - acronyms in titles brace-protected against style sentence-casing

@mastersthesis{Buron2005,
  author    = {Buron, Florian},
  title     = {High-Resolution Three-Dimensional Sensing of Fast Deforming Objects},
  school    = {Stanford University},
  year      = {2005},
  owner     = {fongpwf},
  timestamp = {2007.05.17}
}

@article{Fong2009,
  author    = {Fong, Philip},
  title     = {Sensing, Acquisition, and Interactive Playback of Data-based Models for Elastic Deformable Objects},
  journal   = {International Journal of Robotics Research},
  year      = {2009},
  volume    = {28},
  number    = {5},
  pages     = {622--629},
  month     = may,
  owner     = {fongpwf},
  timestamp = {2009.10.03}
}

@phdthesis{Fong2007,
  author    = {Fong, Philip},
  title     = {Data-based Models for Deformable Objects: Sensing, Acquisition, and Interactive Playback},
  school    = {Stanford University},
  year      = {2007},
  month     = jun,
  owner     = {fongpwf},
  timestamp = {2008.05.24},
  url       = {http://robotics.stanford.edu/~fongpwf/thesis_top_comp.pdf}
}

@inproceedings{Fong3DSensorIros,
  author    = {Fong, Philip and Buron, Florian},
  title     = {High-resolution three-dimensional sensing of fast deforming objects},
  booktitle = {Intelligent Robots and Systems. ({IROS} 2005). 2005 {IEEE}/{RSJ} International Conference on},
  year      = {2005},
  pages     = {1606--1611},
  month     = aug,
  abstract  = {In applications like motion capture, high speed collision testing and robotic manipulation of deformable objects there is a critical need for capturing the 3D geometry of fast moving and/or deforming objects. Although there exists many 3D sensing techniques, most cannot deal with dynamic scenes (e.g., laser scanning). Others, like stereovision, require that object surfaces be appropriately textured. Few, if any, build high- resolution 3D models of dynamic scenes. This paper presents a technique to compute high-resolution range maps from single images of moving and deforming objects. This method is based on observing the deformation of a projected light pattern that combines a set of parallel colored stripes and a perpendicular set of sinusoidal intensity stripes. While the colored stripes allow the sensor to compute absolute depths at coarse resolution, the sinusoidal intensity stripes give dense relative depths. This twofold pattern makes it possible to extract a high-resolution range map from each image in a video sequence. The sensor has been implemented and tested on several deforming objects.},
  doi       = {10.1109/IROS.2005.1545602},
  keywords  = {computer vision, image motion analysis, object detection},
  owner     = {fongpwf},
  timestamp = {2009.10.03},
  url       = {http://robotics.stanford.edu/~fongpwf/3dcampaper_iros.pdf}
}

@inproceedings{Fong3dSensorProcams2005,
  author    = {Fong, Philip and Buron, Florian},
  title     = {Sensing Deforming and Moving Objects with Commercial Off the Shelf Hardware},
  booktitle = {Computer Vision and Pattern Recognition ({CVPR}). 2005 {IEEE} Computer Society Conference on},
  year      = {2005},
  volume    = {3},
  pages     = {101--108},
  abstract  = {In many application areas, there exists a crucial need for capturing 3D videos of fast moving and/or deforming objects. A 3D video is a sequence of 3D representations at high time and space resolution. Although many 3D sensing techniques are available, most cannot deal with dynamic scenes (e.g. laser scanning), can only deal with textured surfaces (e.g. stereo vision) and/or require expensive specialized hardware. This paper presents a technique to compute high-resolution range maps from single images of moving and deformable objects. A camera observes the deformation of a projected light pattern that combines a set of parallel colored stripes and a perpendicular set of sinusoidal intensity stripes. While the colored stripes allow recovering absolute depths at coarse resolution, the sinusoidal intensity stripes give dense relative depths. This twofold pattern makes it possible to extract a high-resolution range map from each image captured by the camera. This approach is based on sound mathematical principles, but its implementation requires giving great care to a number of low-level details. In particular, the sensor has been implemented using commercial off the shelf hardware, which distorts sensed and transmitted signals in many ways. A novel method was developed to characterize and compensate for distortions due to chromatic aberrations. The sensor has been tested on several moving and deforming objects.},
  doi       = {10.1109/CVPR.2005.524},
  issn      = {1063-6919},
  owner     = {fongpwf},
  timestamp = {2007.05.17},
  url       = {http://robotics.stanford.edu/~fongpwf/3d_sensor_procams.pdf}
}

@inproceedings{Fong2005,
  author    = {Fong, Philip and Teruya, Alan and Lowry, Mark},
  title     = {Characterization and Compensation of High Speed Digitizers},
  booktitle = {{IEEE} Instrumentation and Measurement Technology Conference ({IMTC})},
  year      = {2005},
  address   = {Ottawa, Canada},
  month     = may,
  owner     = {fongpwf},
  timestamp = {2009.10.03},
  url       = {http://robotics.stanford.edu/~fongpwf/IMTC_characterization_comp.pdf}
}

@inproceedings{Kegelmeyer2007,
  author    = {Kegelmeyer, L. M. and Fong, P. and Glenn, S. M. and Liebman, J.},
  title     = {Local Area Signal-to-Noise Ratio ({LASNR}) algorithm for Image Segmentation},
  booktitle = {{SPIE}: Applications of Digital Image Processing {XXX}},
  year      = {2007},
  address   = {San Diego},
  month     = aug,
  owner     = {fongpwf},
  timestamp = {2009.10.03},
  url       = {http://robotics.stanford.edu/~fongpwf/LASNR11_spie_6696-85-o.pdf}
}

@article{Thrun2006,
  author    = {Thrun, S. and Montemerlo, M. and Dahlkamp, H. and Stavens, D. and Aron, A. and Diebel, J. and Fong, P. and Gale, J. and Halpenny, M. and Hoffmann, G. and Lau, K. and Oakley, C. and Palatucci, M. and Pratt, V. and Stang, P. and Strohband, S. and Dupont, C. and Jendrossek, L.-E. and Koelen, C. and Markey, C. and Rummel, C. and van Niekerk, J. and Jensen, E. and Alessandrini, P. and Bradski, G. and Davies, B. and Ettinger, S. and Kaehler, A. and Nefian, A. and Mahoney, P.},
  title     = {Winning the {DARPA} Grand Challenge},
  journal   = {Journal of Field Robotics},
  year      = {2006},
  volume    = {23},
  number    = {9},
  owner     = {fongpwf},
  timestamp = {2009.10.03},
  url       = {http://robots.stanford.edu/papers/thrun.stanley05.pdf}
}

@comment{jabref-meta: selector_publisher:}

@comment{jabref-meta: selector_author:}

@comment{jabref-meta: selector_journal:}

@comment{jabref-meta: selector_keywords:}