@inbook{10.2307/j.ctt7smmk.13,
  author     = {Absil, P.-A. and Mahony, R. and Sepulchre, R.},
  bookauthor = {Absil, P.-A. and Mahony, R. and Sepulchre, R.},
  title      = {A Constellation of Superlinear Algorithms},
  booktitle  = {Optimization Algorithms on Matrix Manifolds},
  pages      = {168--188},
  publisher  = {Princeton University Press},
  address    = {Princeton, NJ},
  year       = {2008},
  isbn       = {9780691132983},
  doi        = {10.2307/j.ctt7smmk.13},
  url        = {http://www.jstor.org/stable/j.ctt7smmk.13},
  abstract   = {The Newton method (Algorithm 5 in Chapter 6) applied to the gradient of a real-valued cost is the archetypal superlinear optimization method. The Newton method, however, suffers from a lack of global convergence and the prohibitive numerical cost of solving the Newton equation (6.2) necessary for each iteration. The trust-region approach, presented in Chapter 7, provides a sound framework for addressing these shortcomings and is a good choice for a generic optimization algorithm. Trust-region methods, however, are algorithmically complex and may not perform ideally on all problems. A host of other algorithms have been developed that provide lower-cost numerical iterations},
}