@article{2383,
  author   = {Kim, Sooyeon and Moses, Tim and Yoo, Hanwook (Henry)},
  title    = {A Comparison of {IRT} Proficiency Estimation Methods Under Adaptive Multistage Testing},
  journal  = {Journal of Educational Measurement},
  year     = {2015},
  volume   = {52},
  number   = {1},
  pages    = {70--79},
  issn     = {1745-3984},
  doi      = {10.1111/jedm.12063},
  abstract = {This inquiry is an investigation of item response theory (IRT) proficiency estimators' accuracy under multistage testing (MST). We chose a two-stage MST design that includes four modules (one at Stage 1, three at Stage 2) and three difficulty paths (low, middle, high). We assembled various two-stage MST panels (i.e., forms) by manipulating two assembly conditions in each module, such as difficulty level and module length. For each panel, we investigated the accuracy of examinees' proficiency levels derived from seven IRT proficiency estimators. The choice of Bayesian (prior) versus non-Bayesian (no prior) estimators was of more practical significance than the choice of number-correct versus item-pattern scoring estimators. The Bayesian estimators were slightly more efficient than the non-Bayesian estimators, resulting in smaller overall error. Possible score changes caused by the use of different proficiency estimators would be nonnegligible, particularly for low- and high-performing examinees.},
}