% Encoding: utf-8
@InProceedings{tenorth09dataset,
author = {Moritz Tenorth and Jan Bandouch and Michael Beetz},
title = {{The {TUM} Kitchen Data Set of Everyday Manipulation Activities for Motion Tracking and Action Recognition}},
booktitle = {IEEE International Workshop on Tracking Humans for the Evaluation of their Motion in Image Sequences (THEMIS), in conjunction with ICCV2009},
year = {2009},
bib2html_pubtype = {Conference Paper},
bib2html_rescat = {Perception},
bib2html_groups = {Memoman, K4C},
bib2html_funding = {CoTeSys},
bib2html_domain = {Assistive Household},
abstract = {We introduce the publicly available TUM Kitchen Data Set as a comprehensive collection of activity sequences recorded in a kitchen environment equipped with multiple complementary sensors. The recorded data consists of observations of naturally performed manipulation tasks as encountered in everyday activities of human life. Several instances of a table-setting task were performed by different subjects, involving the manipulation of objects and the environment. We provide the original video sequences, full-body motion capture data recorded by a markerless motion tracker, RFID tag readings and magnetic sensor readings from objects and the environment, as well as corresponding action labels. In this paper, we both describe how the data was computed, in particular the motion tracker and the labeling, and give examples of what it can be used for. We present first results of an automatic method for segmenting the observed motions into semantic classes, and describe how the data can be integrated in a knowledge-based framework for reasoning about the observations.}
}
@InProceedings{beetz09qlts,
author = {Michael Beetz and Jan Bandouch and Dominik Jain and Moritz Tenorth},
title = {{Towards Automated Models of Activities of Daily Life}},
booktitle = {First International Symposium on Quality of Life Technology -- Intelligent Systems for Better Living},
year = {2009},
address = {Pittsburgh, Pennsylvania, USA},
bib2html_pubtype = {Conference Paper},
bib2html_rescat = {Learning, Models, Planning, Perception, Knowledge, Reasoning, Representation},
bib2html_groups = {Memoman, K4C, ProbCog},
bib2html_funding = {CoTeSys},
bib2html_domain = {Assistive Household},
abstract = {We propose automated probabilistic models of everyday
activities (AM-EvA) as a novel technical means for the
perception, interpretation, and analysis of everyday manipulation
tasks and activities of daily life. AM-EvAs are based on
action-related concepts in everyday activities such as
action-related places (the place where cups are taken from the
cupboard), capabilities (the objects that can be picked up
single-handedly), etc. These concepts are probabilistically derived
from a set of previous activities that are fully and automatically
observed by computer vision and additional sensor systems. AM-EvA
models enable robots and technical systems to analyze activities in
the complete situation and activity context. They render the
classification and the assessment of actions and situations objective
and can justify the probabilistic interpretation with respect to the
activities from which the concepts have been learned.
In this paper, we describe the current state of implementation of the
system that realizes this idea of automated models of
everyday activities and show example results from the observation
and analysis of table setting episodes.}
}
@Article{beetz10ameva,
title = {{Towards Automated Models of Activities of Daily Life}},
author = {Michael Beetz and Moritz Tenorth and Dominik Jain and Jan Bandouch},
journal = {Technology and Disability},
volume = {22},
number = {1-2},
pages = {27--40},
year = {2010},
publisher = {IOS Press},
bib2html_pubtype = {Journal},
bib2html_groups = {K4C, Memoman, ProbCog},
bib2html_funding = {CoTeSys, MeMoMan},
bib2html_rescat = {Perception, Models, Representation, Learning, Reasoning},
bib2html_domain = {Assistive Household}
}