@article{ijrr12,
  author  = {Pangercic, Dejan and Tenorth, Moritz and Blodow, Nico and Jain, Dominik and Marton, Zoltan-Csaba and Pitzer, Benjamin and Ruhr, Thomas and Schuster, Martin and Sturm, Juergen and Beetz, Michael},
  title   = {Semantic Object Maps for Robotic Housework -- Representation, Acquisition and Use},
  journal = {Journal of Robotics and Autonomous Systems},
  note    = {Submitted to the Special Issue ``Semantic Perception, Mapping and Exploration''},
  year    = {2012},
}

@inproceedings{iros12a,
  author    = {Roan, Philip and Deshpande, Nikhil and Wang, Yizhou and Pitzer, Benjamin},
  title     = {Manipulator State Estimation with Low Cost Accelerometers and Gyroscopes},
  booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
  year      = {2012},
  file      = {pdf:roan12_iros.pdf},
}

@inproceedings{iros12b,
  author    = {Sankaran, Bharath and Pitzer, Benjamin and Osentoski, Sarah},
  title     = {Failure Recovery with Shared Autonomy},
  booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
  year      = {2012},
  file      = {pdf:sankaran12_iros.pdf},
}

@inproceedings{iros12c,
  author    = {Pangercic, Dejan and Tenorth, Moritz and Pitzer, Benjamin and Beetz, Michael},
  title     = {Semantic Object Maps for Robotic Housework - Representation, Acquisition and Use},
  booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
  year      = {2012},
  file      = {pdf:pangercic12_iros.pdf},
}

@article{ijsr12,
  author      = {Osentoski, Sarah and Pitzer, Benjamin and Crick, Christopher and Jay, Graylin and Dong, Shuonan and Grollman, Daniel and Suay, Halit and Jenkins, Odest},
  affiliation = {Bosch Research and Technology Center, 4005 Miranda Ave, Palo Alto, CA 94304, USA},
  title       = {Remote Robotic Laboratories for Learning from Demonstration},
  journal     = {International Journal of Social Robotics},
  publisher   = {Springer Netherlands},
  issn        = {1875-4791},
  keyword     = {Engineering},
  pages       = {1--13},
  doi         = {10.1007/s12369-012-0157-8},
  abstract    = {This paper documents the technology developed during the creation of the PR2 Remote Lab and the process of using it for shared development for Learning from Demonstration. Remote labs enable a larger and more diverse group of researchers to participate directly in state-of-the-art robotics research and will improve the reproducibility and comparability of robotics experiments. We present solutions to interface, control, and design difficulties in the client and server-side software when implementing a remote laboratory architecture. We describe how researchers can interact with the PR2 and its environment remotely through a web interface, as well as develop similar interfaces to visualize and run experiments remotely. Additionally, we describe how the remote lab technology was used by researchers participating in the Robot Learning from Demonstration Challenge (LfD) held in conjunction with the AAAI-11 Conference on Artificial Intelligence. Teams from three institutions used the remote lab as their primary development and testing platform. This paper reviews the process as well as providing observations and lessons learned.},
  file        = {pdf:osentoski12_ijsr.pdf},
  year        = {2012},
}

@inproceedings{robotik12,
  author    = {Vorndamme, J. and Petereit, S. and Pitzer, B. and Roan, P. and Lilge, T. and Albert, A.},
  title     = {Robotic System for Mapping 3D in-wall Information for Craftsmen},
  booktitle = {Proceedings of the 7th German Conference on Robotics (ROBOTIK 2012)},
  publisher = {VDE Verlag, Berlin},
  address   = {Munich, Germany},
  url       = {http://www.vde-verlag.de/proceedings-de/453418005.html},
  file      = {pdf:vorndamme12_robotik.pdf},
  year      = {2012},
}

@inproceedings{icra12,
  author    = {Pitzer, Benjamin and Osentoski, Sarah and Jay, Graylin and Crick, Christopher and Jenkins, Odest Chadwicke},
  title     = {{PR2} Remote Lab: An environment for remote development and experimentation},
  booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
  year      = {2012},
  month     = may,
  pages     = {3200--3205},
  abstract  = {In this paper, we describe a remote lab system that allows remote groups to access a shared PR2. This lab will enable a larger and more diverse group of researchers to participate directly in state-of-the-art robotics research and will improve the reproducibility and comparability of robotics experiments. We identify a set of requirements that apply to all web-based remote laboratories and focus on solutions to these requirements. Specifically, we present solutions to interface, control and design difficulties in the client and server-side software when implementing a remote laboratory architecture. The combination of shared physical hardware and shared middleware software allows for experiments that build upon and compare against results on the same platform and in the same environment for common tasks. We describe how researchers can interact with the PR2 and its environment remotely through a web interface, as well as develop similar interfaces to visualize and run experiments remotely.},
  doi       = {10.1109/ICRA.2012.6224653},
  issn      = {1050-4729},
  file      = {pdf:pitzer12_icra.pdf},
}

@inproceedings{iros11a,
  author    = {Bersch, Christian and Pitzer, Benjamin and Kammel, Soeren},
  title     = {Bimanual Robotic Cloth Manipulation for Laundry Folding},
  booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
  year      = {2011},
  file      = {pdf:bersch11_iros.pdf},
}

@inproceedings{iros11b,
  author    = {Pitzer, Benjamin and Osentoski, Sarah and Roan, Philip and Bersch, Christian and Becker, Jan},
  title     = {Making robots cheaper, more capable, and safer},
  booktitle = {The PR2 Workshop, IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
  year      = {2011},
  file      = {pdf:pitzer11_iros.pdf},
}
 
@inproceedings{icra11a,
  author    = {Pitzer, Benjamin and Styer, Michael and Bersch, Christian and DuHadway, Charles and Becker, Jan},
  title     = {Towards Perceptual Shared Autonomy for Robotic Mobile Manipulation},
  booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
  year      = {2011},
  file      = {pdf:pitzer11_icra.pdf},
}

@inproceedings{icra11b,
  author    = {Osentoski, Sarah and Jay, Graylin and Crick, Christopher and Pitzer, Benjamin and DuHadway, Charles and Jenkins, Odest Chadwicke},
  title     = {Robots as web services: Reproducible experimentation and application development using rosjs},
  booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
  year      = {2011},
  file      = {pdf:osentoski11_icra.pdf},
}

@inproceedings{icra10a,
  author    = {Pitzer, B. and Stiller, C.},
  title     = {Probabilistic mapping for mobile robots using spatial correlation models},
  booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
  year      = {2010},
  month     = may,
  pages     = {5402--5409},
  abstract  = {Generating accurate environment representations can significantly improve the autonomy of mobile robots. In this article we present a novel probabilistic technique for solving the full SLAM problem by jointly solving the data registration problem and the accurate reconstruction of the underlying geometry. The key idea of this paper is to incorporate spatial correlation models as prior knowledge on the map we seek to construct. We formulate the mapping problem as a maximum a-posteriori estimation comprising common probabilistic motion and sensor models as well as two spatial correlation models to guide the optimization. Instead of discarding data at an early stage, our algorithm makes use of all data available in the optimization process. When applied to SLAM, our method generates maps that closely resemble the real environment. We compare our approach to state-of-the-art algorithms, using both real and synthetic data sets.},
  keywords  = {SLAM problem;data registration problem;maximum a-posteriori estimation;mobile robots;optimization;probabilistic mapping;spatial correlation models;SLAM (robots);maximum likelihood estimation;mobile robots;motion control;navigation;optimisation},
  doi       = {10.1109/ROBOT.2010.5509884},
  issn      = {1050-4729},
  file      = {pdf:pitzer10a_icra.pdf},
}

@inproceedings{icra10b,
  author    = {Pitzer, B. and Kammel, S. and DuHadway, C. and Becker, J.},
  title     = {Automatic reconstruction of textured 3D models},
  booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
  year      = {2010},
  month     = may,
  pages     = {3486--3493},
  abstract  = {This paper describes a system for automatic mapping and generation of textured 3D models of indoor environments without user interaction. Our data acquisition system is based on a Segway RMP platform which allows us to automatically acquire large amounts of textured 3D scans in a short amount of time. The first data processing step is registration and mapping. We propose a probabilistic, non-rigid registration method that incorporates statistical sensor models and surface prior distributions to optimize alignment and the reconstructed surface at the same time. Second, in order to fuse multiple scans and to reconstruct a consistent 3D surface representation, we incorporate a volumetric surface reconstruction method based on a oriented point. For the final step of texture reconstruction, we present a novel method to automatically generate blended textures from multiple images and multiple scans which are mapped onto the 3D model for photo-realistic visualization. We conclude our report with results from a large-scale, real-world experiment. The most significant contribution of this research is a functional system that covers all steps required to automatically reconstruct textured 3D models of large indoor environments.},
  keywords  = {3D surface representation;Segway RMP platform;automatic mapping;data acquisition system;photo realistic visualization;probabilistic nonrigid registration method;statistical sensor model;statistical sensor models;surface reconstruction;textured 3D models automatic reconstruction;data acquisition;data visualisation;image reconstruction;image scanners;image texture;mobile robots;optimisation;robot vision;solid modelling;surface reconstruction},
  doi       = {10.1109/ROBOT.2010.5509568},
  issn      = {1050-4729},
  file      = {pdf:pitzer10b_icra.pdf},
}

@inproceedings{iv08a,
  author    = {Gindele, Tobias and Jagszent, Daniel and Pitzer, Benjamin and Dillmann, R{\"u}diger},
  title     = {Design of the Planner of Team {AnnieWAY}'s Autonomous Vehicle used in the {DARPA} Urban Challenge 2007},
  booktitle = {IEEE Intelligent Vehicles Symposium},
  year      = {2008},
  address   = {Eindhoven, Netherlands},
  file      = {pdf:gindele08_iv.pdf},
}

@inproceedings{iv08b,
  author    = {Kammel, S{\"o}ren and Pitzer, Benjamin},
  title     = {Lidar-based Lane Marker Detection and Mapping},
  booktitle = {IEEE Intelligent Vehicles Symposium},
  year      = {2008},
  address   = {Eindhoven, Netherlands},
  file      = {pdf:kammel08_iv.pdf},
}

@article{jfr08,
  author  = {Kammel, S{\"o}ren and Ziegler, Julius and Pitzer, Benjamin and Werling, Moritz and Gindele, Tobias and Jagszent, Daniel and Schr{\"o}der, Joachim and Thuy, Michael and Goebl, Matthias and von Hundelshausen, Felix and Pink, Oliver and Frese, Christian and Stiller, Christoph},
  title   = {Team {AnnieWAY}'s Autonomous System for the 2007 {DARPA} Urban Challenge},
  journal = {Journal of Field Robotics},
  year    = {2008},
  volume  = {25},
  pages   = {615--639},
  month   = sep,
  file    = {pdf:kammel08_jfr.pdf},
}

@incollection{robovision08,
  author    = {Stiller, Christoph and Kammel, S{\"o}ren and Pitzer, Benjamin and Ziegler, Julius and Werling, Moritz and Gindele, Tobias and Jagszent, Daniel},
  title     = {Team {AnnieWAY}'s Autonomous System},
  booktitle = {Robot Vision},
  publisher = {Springer Berlin / Heidelberg},
  year      = {2008},
  volume    = {4931},
  series    = {Lecture Notes in Computer Science},
  pages     = {248--259},
  month     = jan,
  abstract  = {This paper reports on AnnieWAY, an autonomous vehicle that is capable of driving through urban scenarios and that has successfully entered the finals of the DARPA Urban Challenge 2007 competition. After describing the main challenges imposed and the major hardware components, we outline the underlying software structure and focus on selected algorithms. A recent laser scanner plays the prominent role in the perception of the environment. It measures range and reflectivity for each pixel. While the former is used to provide 3D scene geometry, the latter allows robust lane marker detection. Mission and maneuver selection is conducted via a concurrent hierarchical state machine that specifically ascertains behavior in accordance with California traffic rules. We conclude with a report of the results achieved during the competition.},
  doi       = {10.1007/978-3-540-78157-8},
  url       = {http://www.springerlink.com/content/d1g33x1p6635n267/},
  file      = {pdf:stiller08_rv.pdf},
}

@inproceedings{fas08,
  author    = {Ziegler, Julius and Pitzer, Benjamin},
  title     = {Bahnplanung f{\"u}r das autonome Fahrzeug {AnnieWAY}},
  booktitle = {5. Workshop Fahrerassistenzsysteme},
  year      = {2008},
  address   = {Walting, Germany},
  file      = {pdf:ziegler08_fas.pdf},
}

@inproceedings{vmv06,
  author    = {Pitzer, Benjamin and Libuda, Lars and Kraiss, Karl-Friedrich},
  title     = {Knowledge-based Scene Analysis in Indoor Environments Using Colour and Range Images},
  booktitle = {Vision, Modeling, and Visualization Conference (VMV)},
  year      = {2006},
  editor    = {Kobbelt, L. and Kuhlen, T. and Aach, T. and Westermann, R.},
  pages     = {33--40},
  address   = {Aachen, Germany},
  month     = nov,
  publisher = {Aka GmbH},
  abstract  = {Object recognition from camera images is inherently an ambiguous problem. Even when stereo vision techniques are used, it is difficult to perform robust object recognition. Humans have a broad knowledge about their environment and are able to use this knowledge to reason in unknown environments. In this paper we present a knowledge based system to analyze single color and range images of indoor scenes inspired by human visual perception. The input images are recursively processed over four layers of abstraction resulting in a semantic scene description. A generic scene model of typical indoor environments is used as a priori knowledge. This model is encoded in semantic networks for explicit knowledge representation. The developed system is applied to images of artificial and real world indoor scenes, where it demonstrates good reconstruction rates.},
  file      = {pdf:pitzer06_vmv.pdf},
}