From 7bb98946a27dfdfd933b0d384a292944173141a1 Mon Sep 17 00:00:00 2001
From: Varun Agrawal
Date: Sat, 19 Oct 2024 17:03:15 -0400
Subject: [PATCH] missed two templates

---
 gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
index 1aee33a72..bd106afbe 100644
--- a/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
+++ b/gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
@@ -51,7 +51,7 @@ double HestenesStiefel(const Gradient &currentGradient,
                        const Gradient &prevGradient,
                        const Gradient &direction) {
   // Hestenes-Stiefel: beta = g_n'*(g_n-g_n-1)/(-s_n-1')*(g_n-g_n-1)
-  VectorValues d = currentGradient - prevGradient;
+  Gradient d = currentGradient - prevGradient;
   const double beta = std::max(0.0, currentGradient.dot(d) / -direction.dot(d));
   return beta;
 }
@@ -59,7 +59,7 @@ double HestenesStiefel(const Gradient &currentGradient,
 /// The Dai-Yuan formula for computing β, the direction of steepest descent.
 template <typename Gradient>
 double DaiYuan(const Gradient &currentGradient, const Gradient &prevGradient,
-               const VectorValues &direction) {
+               const Gradient &direction) {
   // Dai-Yuan: beta = g_n'*g_n/(-s_n-1')*(g_n-g_n-1)
   const double beta = std::max(0.0, currentGradient.dot(currentGradient) /
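
For context, a minimal standalone sketch (not part of the patch) of why the two leftover VectorValues references matter: once the template parameter Gradient is used consistently, the beta helper compiles for any gradient type that provides dot() and operator-. Instantiating it with Eigen::VectorXd below is an illustrative assumption, not how GTSAM necessarily uses these helpers.

    // Minimal sketch, assuming Eigen::VectorXd as a stand-in Gradient type.
    // With the hardcoded VectorValues that this patch removes, this
    // instantiation would not compile outside GTSAM's linear types.
    #include <Eigen/Dense>
    #include <algorithm>
    #include <iostream>

    template <typename Gradient>
    double HestenesStiefel(const Gradient &currentGradient,
                           const Gradient &prevGradient,
                           const Gradient &direction) {
      // Hestenes-Stiefel: beta = g_n'*(g_n - g_{n-1}) / (-s_{n-1}'*(g_n - g_{n-1}))
      Gradient d = currentGradient - prevGradient;  // works for any Gradient type
      return std::max(0.0, currentGradient.dot(d) / -direction.dot(d));
    }

    int main() {
      Eigen::VectorXd g(2), gPrev(2), s(2);
      g << 1.0, 2.0;       // current gradient g_n
      gPrev << 0.5, 1.5;   // previous gradient g_{n-1}
      s << -1.0, -1.0;     // previous search direction s_{n-1}
      std::cout << "beta = " << HestenesStiefel(g, gPrev, s) << "\n";
      return 0;
    }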