@article{bej12hfes,
title = {Tools for Predicting the Duration and Variability of Skilled Performance without Skilled Performers},
author = { Bonnie E. John and Evan W. Patton and Wayne D. Gray and Donald F. Morrison},
doi = {10.1177/1071181312561206},
year = {2012},
date = {2012-01-01},
booktitle = {56th Annual Conference of the Human Factors and Ergonomics Society},
journal = {Proceedings of the Human Factors and Ergonomics Society Annual Meeting},
volume = {56},
number = {1},
pages = {985--989},
publisher = {HFES},
address = {Santa Monica, CA},
abstract = {Many devices are designed to allow skilled users to complete routine tasks quickly, often within a specified amount of time. Predictive human performance modeling has long been able to predict the mean time to accomplish a task, making it possible to compare device designs before building them. However, estimates of the variability of performance are also important, especially in real-time, safety-critical tasks. Until recently, the human factors community lacked tools to predict the variability of skilled performance. In this paper, we describe a combination of theory-based tools (CogTool and SANLab) that address this critical gap and that can easily be used by human factors practitioners or system designers. We describe these tools, their integration, and provide a concrete example of their use in the context of entering the landing speed into the Boeing 777 Flight Management Computer (FMC) using the Control and Display Unit (CDU).},
keywords = {CogTool, SANLab},
pubstate = {published},
tppubtype = {article}
}
@article{patton12hfes,
title = {Automated CPM-GOMS Modeling from Human Data},
author = { Evan W. Patton and Wayne D. Gray and Bonnie E. John},
doi = {10.1177/1071181312561210},
year = {2012},
date = {2012-01-01},
journal = {Proceedings of the Human Factors and Ergonomics Society Annual Meeting},
volume = {56},
number = {1},
pages = {1005--1009},
publisher = {HFES},
address = {Santa Monica, CA},
abstract = {Our work with the Argus Prime (Schoelles & Gray, 2001) simulated task environment has uncovered a variety of strategies that subjects use, at least sometimes, during target acquisition. However, it is difficult to determine how well subjects implement these strategies and, if implemented, how much these strategies contribute to overall performance. Recently, we have adopted Byrne and Kirlik's (2002) cognitive-ecological approach to determine what strategies work best in different task environments. In the work reported here, we took one computational cognitive model and, holding all else constant, swapped in and out alternative strategies for target acquisition. We then ran each of these simulated human users ten times through each of four interface conditions.},
keywords = {CogTool, Log Analyzer, SANLab},
pubstate = {published},
tppubtype = {article}
}