From c0965091b66f5ea762468955ab39005f398c87f2 Mon Sep 17 00:00:00 2001
From: Iago Bonnici <iago.bonnici@umontpellier.fr>
Date: Thu, 29 Aug 2024 16:20:48 +0200
Subject: [PATCH] Expose `main/linsearch` CSV trace files.

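The BFGS optimiser can record two CSV traces: one line per step
("main" trace) and one line per evaluation of the Wolfe linear search
within a step ("linsearch" trace). Expose both file paths as user
configuration under `search.bfgs` instead of keeping them internal:

- Validate the paths at config-check time: an existing file is
  overwritten, while a directory or a missing parent folder is
  rejected, and setting `linsearch_trace` without `main_trace` is a
  configuration error.
- Drive logging by the presence of these paths: the `log` step-count
  argument is dropped from the BFGS `minimize`, along with the `Optim`
  trait that carried it; each optimiser now exposes an inherent
  `minimize` method.
- Move configuration defaults from free functions to `Default` impls
  so serde can fill missing fields via `#[serde(default)]`.
- Label trace columns with the model variable names (`theta`, `tau_1`,
  `delta_tau`, `gf`, `ac`, `bc`, `ancient`, `gf_time_{i}`) when they
  are known, falling back to plain indices.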
---
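Notes (kept out of the commit message):

A minimal sketch of the intended configuration, using the raw
crate-internal `BfgsConfig` from this patch (it now implements
`Default`, so only the trace fields need spelling out):

    use std::path::PathBuf;

    use crate::config::deserialize::BfgsConfig;

    // Enable both traces: every BFGS step is then logged in the first
    // file, and each step's linear search is detailed in the second.
    let bfgs = BfgsConfig {
        main_trace: Some(PathBuf::from("./target/bfgs.csv")),
        linsearch_trace: Some(PathBuf::from("./target/bfgs_linsearch.csv")),
        ..BfgsConfig::default()
    };

Call sites drop the former log-level argument, since tracing now
follows the configured paths:

    // Before: search.bfgs.minimize(f, &p, 0)?
    let opt = search.bfgs.minimize(f, &p)?;

With the variable names wired in by the config check, the linsearch
trace header becomes, for instance:

    id,alpha,loss,phigrad,var_theta,var_tau_1,...,grad_theta,grad_tau_1,...
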
 src/config.rs             |  2 +-
 src/config/check.rs       | 69 +++++++++++++++++++++++++++++---------
 src/config/defaults.rs    | 67 ++++++++++++++++++++++---------------
 src/config/deserialize.rs | 13 ++++----
 src/learn.rs              |  5 +--
 src/lib.rs                |  2 +-
 src/optim.rs              | 70 +++++++++++++++++++++------------------
 src/optim/bfgs.rs         | 61 ++++++++++++++++------------------
 src/optim/gd.rs           | 12 +++----
 src/optim/wolfe_search.rs | 41 +++++++++++++----------
 10 files changed, 199 insertions(+), 143 deletions(-)

diff --git a/src/config.rs b/src/config.rs
index b5496e9..1f265e4 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -19,7 +19,7 @@ use crate::{
 
 pub mod check;
 pub(crate) mod defaults;
-mod deserialize;
+pub(crate) mod deserialize;
 
 // The final value handed out to user.
 pub struct Config {
diff --git a/src/config/check.rs b/src/config/check.rs
index 4602eb5..d824e46 100644
--- a/src/config/check.rs
+++ b/src/config/check.rs
@@ -8,6 +8,7 @@ use std::{
 };
 
 use arrayvec::ArrayVec;
+use colored::Colorize;
 use regex::Regex;
 use snafu::{ensure, Snafu};
 
@@ -143,7 +144,7 @@ impl TryFrom<&'_ raw::Config> for Search {
             init_parms: Parameters::try_from(&raw.init, &raw.gf_times)?,
             init_data_reduction_factor,
             init_descent: (&raw.search.init_descent).try_into()?,
-            bfgs: (&raw.search.bfgs).try_into()?,
+            bfgs: BfgsConfig::try_from(&raw.search.bfgs, raw.gf_times.len())?,
             final_descent: raw
                 .search
                 .final_descent
@@ -169,16 +170,15 @@ impl TryFrom<&'_ raw::GdConfig> for GdConfig {
     }
 }
 
-impl TryFrom<&'_ raw::BfgsConfig> for BfgsConfig {
-    type Error = Error;
-    fn try_from(raw: &raw::BfgsConfig) -> Result<Self, Self::Error> {
+impl BfgsConfig {
+    fn try_from(raw: &raw::BfgsConfig, n_gf_times: usize) -> Result<Self, Error> {
         let &raw::BfgsConfig {
             max_iter,
             ref wolfe_search,
             step_size_threshold,
             ref slope_tracking,
-            ref main_trace_path,
-            ref linsearch_trace_path,
+            ref main_trace,
+            ref linsearch_trace,
         } = raw;
         ensure!(
             0. <= step_size_threshold,
@@ -187,19 +187,45 @@ impl TryFrom<&'_ raw::BfgsConfig> for BfgsConfig {
                   must be null or positive. Received: {step_size_threshold}.")
             )
         );
-        let check_path = |path: &Option<PathBuf>| -> Result<_, Self::Error> {
+        let check_path = |path: &Option<PathBuf>| -> Result<_, Error> {
             let Some(path) = path else {
                 return Ok(None);
             };
-            let path = io::canonicalize(path)?;
             if path.is_file() {
+                let path = io::canonicalize(path)?;
                 eprintln!("Will override {:?}.", path.to_string_lossy());
-            };
-            if let Some(parent) = path.parent() {
-                ensure!(parent.exists(), NoSuchFolderErr { path });
+            } else {
+                ensure!(
+                    !path.is_dir(),
+                    err!(("Path designates a folder: {:?}", path.to_string_lossy()))
+                );
+                if let Some(parent) = path.parent() {
+                    ensure!(parent.exists(), NoSuchFolderErr { path: parent });
+                } else {
+                    return err!((
+                        "Path does not exist and has no parent: {:?}",
+                        path.to_string_lossy()
+                    ))
+                    .fail();
+                }
             }
-            Ok(Some(path))
+            Ok(Some(path.clone()))
         };
+        let main_trace_path = check_path(main_trace)?;
+        let linsearch_trace_path = check_path(linsearch_trace)?;
+        if let (None, Some(path)) = (&main_trace_path, &linsearch_trace) {
+            return err!((
+                "A path is specified with {linopt} \
+                 to log the detailed trace of the BFGS linear search within each step, \
+                 but none is specified to log the global trace of the steps themselves. \
+                 Consider setting the {mainopt} option.\n\
+                 The path given was: {path}",
+                linopt = "`search.bfgs.linsearch_trace`".blue(),
+                mainopt = "`search.bfgs.main_trace`".blue(),
+                path = format!("{:?}", path.to_string_lossy()).black(),
+            ))
+            .fail();
+        }
         Ok(Self {
             max_iter,
             slope_tracking: slope_tracking
@@ -208,9 +234,20 @@ impl TryFrom<&'_ raw::BfgsConfig> for BfgsConfig {
                 .transpose()?,
             wolfe: wolfe_search.try_into()?,
             small_step: step_size_threshold,
-            // TODO: expose.
-            main_trace_path: check_path(main_trace_path)?,
-            linsearch_trace_path: check_path(linsearch_trace_path)?,
+            main_trace_path,
+            linsearch_trace_path,
+            // Not exposed: the variable names are tied to the program's internals.
+            // /!\ The order here must match the variables' order within the optimised tensors.
+            variable_names: Some({
+                let mut names = ["theta", "tau_1", "delta_tau", "gf", "ac", "bc", "ancient"]
+                    .iter()
+                    .map(ToString::to_string)
+                    .collect::<Vec<_>>();
+                for i in 0..n_gf_times {
+                    names.push(format!("gf_time_{i}"));
+                }
+                names
+            }),
         })
     }
 }
@@ -572,7 +609,7 @@ pub enum Error {
     Io { source: io::Error },
     #[snafu(transparent)]
     Parse { source: raw::Error },
-    #[snafu(display("Configuration error: {mess}"))]
+    #[snafu(display("Configuration: {mess}"))]
     Config { mess: String },
     #[snafu(transparent)]
     Optim { source: optim::Error },
diff --git a/src/config/defaults.rs b/src/config/defaults.rs
index dbe9681..37dcd8d 100644
--- a/src/config/defaults.rs
+++ b/src/config/defaults.rs
@@ -34,7 +34,7 @@ pub(crate) fn search() -> Search {
     Search {
         init_data_reduction_factor: init_data_reduction_factor(),
         init_descent: init_descent(),
-        bfgs: bfgs(),
+        bfgs: BfgsConfig::default(),
         final_descent: None,
     }
 }
@@ -51,37 +51,50 @@ pub(crate) fn init_descent() -> GdConfig {
     }
 }
 
-pub(crate) fn bfgs() -> BfgsConfig {
-    BfgsConfig {
-        max_iter: 1_000,
-        wolfe_search: wolfe_search(),
-        step_size_threshold: 1e-9,
-        slope_tracking: Some(SlopeTrackingConfig { sample_size: 20, threshold: 1e-3, grain: 5 }),
-        main_trace_path: None,
-        linsearch_trace_path: None,
+impl Default for BfgsConfig {
+    fn default() -> Self {
+        BfgsConfig {
+            max_iter: 1_000,
+            wolfe_search: WolfeSearchConfig::default(),
+            step_size_threshold: 1e-9,
+            slope_tracking: Some(SlopeTrackingConfig {
+                sample_size: 20,
+                threshold: 1e-3,
+                grain: 5,
+            }),
+            main_trace: None,
+            linsearch_trace: None,
+        }
     }
 }
 
-pub(crate) fn wolfe_search() -> WolfeSearchConfig {
-    // Taken from Nocedal and Wright 2006.
-    let c1 = 1e-4;
-    let c2 = 0.1;
-    WolfeSearchConfig {
-        c1,
-        c2,
-        init_step_size: 1.0,
-        step_decrease_factor: c2,
-        step_increase_factor: 10., // (custom addition)
-        flat_gradient: 1e-20,
-        bisection_threshold: 1e-1,
+impl Default for WolfeSearchConfig {
+    fn default() -> Self {
+        // Taken from Nocedal and Wright 2006.
+        let c1 = 1e-4;
+        let c2 = 0.1;
+        WolfeSearchConfig {
+            c1,
+            c2,
+            init_step_size: 1.0,
+            step_decrease_factor: c2,
+            step_increase_factor: 10., // (custom addition)
+            flat_gradient: 1e-20,
+            bisection_threshold: 1e-1,
+        }
     }
 }
 
-#[allow(dead_code)] // Not the default anymore.
-pub(crate) fn final_descent() -> GdConfig {
-    GdConfig {
-        max_iter: 1_000,
-        learning_rate: 1e-5,
-        slope_tracking: Some(SlopeTrackingConfig { sample_size: 100, threshold: 1e-3, grain: 50 }),
+impl Default for GdConfig {
+    fn default() -> Self {
+        GdConfig {
+            max_iter: 1_000,
+            learning_rate: 1e-5,
+            slope_tracking: Some(SlopeTrackingConfig {
+                sample_size: 100,
+                threshold: 1e-3,
+                grain: 50,
+            }),
+        }
     }
 }
diff --git a/src/config/deserialize.rs b/src/config/deserialize.rs
index d5d51d1..70f56e6 100644
--- a/src/config/deserialize.rs
+++ b/src/config/deserialize.rs
@@ -129,25 +129,24 @@ pub(crate) struct Search {
     pub(crate) init_data_reduction_factor: f64,
     #[serde(default = "defaults::init_descent")]
     pub(crate) init_descent: GdConfig,
-    #[serde(default = "defaults::bfgs")]
+    #[serde(default)]
     pub(crate) bfgs: BfgsConfig,
     pub(crate) final_descent: Option<GdConfig>,
 }
 
 #[derive(Deserialize)]
-#[serde(deny_unknown_fields)]
+#[serde(deny_unknown_fields, default)]
 pub(crate) struct BfgsConfig {
     pub(crate) max_iter: u64,
-    #[serde(default = "defaults::wolfe_search")]
     pub(crate) wolfe_search: WolfeSearchConfig,
     pub(crate) step_size_threshold: f64,
     pub(crate) slope_tracking: Option<SlopeTrackingConfig>,
-    pub(crate) main_trace_path: Option<PathBuf>,
-    pub(crate) linsearch_trace_path: Option<PathBuf>,
+    pub(crate) main_trace: Option<PathBuf>,
+    pub(crate) linsearch_trace: Option<PathBuf>,
 }
 
 #[derive(Deserialize)]
-#[serde(deny_unknown_fields)]
+#[serde(deny_unknown_fields, default)]
 pub(crate) struct WolfeSearchConfig {
     pub(crate) c1: f64,
     pub(crate) c2: f64,
@@ -159,7 +158,7 @@ pub(crate) struct WolfeSearchConfig {
 }
 
 #[derive(Deserialize)]
-#[serde(deny_unknown_fields)]
+#[serde(deny_unknown_fields, default)]
 pub(crate) struct GdConfig {
     pub(crate) max_iter: u64,
     pub(crate) learning_rate: f64,
diff --git a/src/learn.rs b/src/learn.rs
index 58c6dbf..e332d32 100644
--- a/src/learn.rs
+++ b/src/learn.rs
@@ -16,7 +16,7 @@ use crate::{
         scenarios::Scenario,
         scores::Scores,
     },
-    optim::{Error as OptimError, Optim, OptimResult, OptimTensor},
+    optim::{Error as OptimError, OptimResult, OptimTensor},
     GeneTriplet, Parameters,
 };
 
@@ -146,7 +146,8 @@ pub fn optimize_likelihood(
 
     println!("-- BFGS learning.");
     let mut opt: Box<dyn OptimResult>;
-    opt = Box::new(search.bfgs.minimize(f, &p, 0)?);
+    // Every step is logged if a trace file path was provided.
+    opt = Box::new(search.bfgs.minimize(f, &p)?);
     let p = opt.best_vars();
     n_eval += opt.n_eval();
     n_diff += opt.n_diff();
diff --git a/src/lib.rs b/src/lib.rs
index 386a2b3..7630d75 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -4,7 +4,7 @@ pub mod genes_forest;
 pub mod interner;
 mod io;
 pub mod it_mean;
-mod learn;
+pub mod learn;
 mod lexer;
 mod model;
 mod optim;
diff --git a/src/optim.rs b/src/optim.rs
index fa7eb1f..8b31ca1 100644
--- a/src/optim.rs
+++ b/src/optim.rs
@@ -1,6 +1,12 @@
 // Disatisfied with tch-rs optimisers,
 // here is a collection of homebrew optimisers procedures
 // built on tch-rs Tensors and autograd.
+//
+// These no longer share a common 'Optim' trait,
+// but each implements a `minimize(f, init, ..)` method
+// to minimize the given scalar function y = f(x),
+// assuming that the tensor 'x' contains all and only
+// the gradient-tracking leaves of the computation.
 
 pub(crate) mod bfgs;
 pub(crate) mod gd;
@@ -16,24 +22,7 @@ use tch::Tensor;
 use tensor::Loggable;
 pub(crate) use tensor::OptimTensor;
 
-// Abstract trait shared by optimisers.
-pub(crate) trait Optim {
-    type Error: Into<Error>;
-    type Result: OptimResult;
-    // Minimize the given scalar function y = f(x).
-    // assuming that the tensor 'x'
-    // contains all and only the gradient-tracking leaves of it.
-    // The 'log' parameters sets the number of steps to wait before displaying one.
-    // Set to 0 to not log anything.
-    fn minimize(
-        &self,
-        f: impl Fn(&Tensor) -> Tensor,
-        init: &Tensor,
-        log: u64,
-    ) -> Result<Self::Result, Error>;
-}
-
-// Abstact over the possible return values of the minimisation function.
+// Abstract over the possible optimisation results.
 pub(crate) trait OptimResult {
     fn best_vars(&self) -> &Tensor;
     fn best_loss(&self) -> f64;
@@ -211,7 +200,7 @@ impl<'c> SlopeTracker<'c> {
 #[derive(Debug, Snafu)]
 #[snafu(context(suffix(Err)))]
 pub enum Error {
-    #[snafu(display("Configuration error:\n{mess}"))]
+    #[snafu(display("Configuration:\n{mess}"))]
     Config { mess: String },
     #[snafu(display("Gradient descent failure:\n{source}"))]
     Gd { source: gd::Error },
@@ -247,7 +236,7 @@ error_convert! {
 
 #[cfg(test)]
 mod tests {
-    use std::{fs::File, io::Write, path::PathBuf};
+    use std::{fs::File, io::Write, num::NonZero, path::PathBuf};
 
     use float_eq::float_eq;
     use rand::{
@@ -258,12 +247,10 @@ mod tests {
     use rand_distr::StandardNormal;
     use tch::{Device, Kind, Tensor};
 
-    use crate::{
-        config::defaults,
-        optim::{
-            bfgs::Config as BfgsConfig, gd::Config as GdConfig, History, Optim, OptimResult,
-            OptimTensor, SlopeTrackingConfig,
-        },
+    use super::wolfe_search::Config as WolfeSearchConfig;
+    use crate::optim::{
+        bfgs::Config as BfgsConfig, gd::Config as GdConfig, History, OptimResult, OptimTensor,
+        SlopeTrackingConfig,
     };
 
     // Generate files to debug simple optimisation tests.
@@ -294,10 +281,27 @@ mod tests {
     }
 
     fn bfgs_config() -> BfgsConfig {
-        let mut bfgs: BfgsConfig = (&defaults::bfgs()).try_into().unwrap();
-        bfgs.main_trace_path = Some(PathBuf::from("./target/bfgs.csv"));
-        bfgs.linsearch_trace_path = Some(PathBuf::from("./target/bfgs_linsearch.csv"));
-        bfgs
+        BfgsConfig {
+            max_iter: 1_000,
+            wolfe: WolfeSearchConfig {
+                c1: 1e-4,
+                c2: 0.1,
+                init_step: 1.0,
+                step_decrease: 0.1,
+                step_increase: 10., // (custom addition)
+                flat_gradient: 1e-20,
+                bisection_threshold: 1e-1,
+            },
+            small_step: 1e-9,
+            slope_tracking: Some(SlopeTrackingConfig {
+                history_size: NonZero::new(20).unwrap(),
+                threshold: 1e-3,
+                grain: 5,
+            }),
+            main_trace_path: Some(PathBuf::from("./target/bfgs.csv")),
+            linsearch_trace_path: Some(PathBuf::from("./target/bfgs_linsearch.csv")),
+            variable_names: None,
+        }
     }
 
     #[test]
@@ -333,7 +337,7 @@ mod tests {
 
         // Fit to data.
         let bfgs = bfgs_config();
-        let bfgs = bfgs.minimize(loss, &init, 1).unwrap();
+        let bfgs = bfgs.minimize(loss, &init).unwrap();
 
         // Check that we get close enough.
         let [oa, ob] = [0, 1].map(|i| bfgs.best_vars().get(i).to_double());
@@ -454,7 +458,7 @@ mod tests {
 
         let init = Tensor::from_slice(&init);
         let bfgs = bfgs_config();
-        let bfgs = bfgs.minimize(&loss, &init, 1).unwrap();
+        let bfgs = bfgs.minimize(&loss, &init).unwrap();
 
         // Extract optimum found.
         for (actual, expected) in (0..=1)
@@ -560,7 +564,7 @@ mod tests {
         let init = (0..20).map(|_| unif.sample(&mut rng)).collect::<Vec<_>>();
         let init = Tensor::from_slice(&init);
         let bfgs = bfgs_config();
-        let bfgs = bfgs.minimize(&loss, &init, 1).unwrap();
+        let bfgs = bfgs.minimize(&loss, &init).unwrap();
         for i in 0..20 {
             let oi = bfgs.best_vars().get(i).to_double();
             assert!(
diff --git a/src/optim/bfgs.rs b/src/optim/bfgs.rs
index fc006b4..eeb7c9e 100644
--- a/src/optim/bfgs.rs
+++ b/src/optim/bfgs.rs
@@ -19,7 +19,7 @@ use crate::optim::{
         self, Config as WolfeSearchConfig, Error as WolfeSearchError, LastStep, Location,
         Summary as WolfeSearchSummary,
     },
-    Best, Error as OptimError, Optim, OptimTensor, SlopeTracker, SlopeTrackingConfig,
+    Best, Error as OptimError, OptimTensor, SlopeTracker, SlopeTrackingConfig,
 };
 
 // The exposed config.
@@ -29,15 +29,15 @@ pub(crate) struct Config {
     pub(crate) wolfe: WolfeSearchConfig,
     // If required.
     pub(crate) slope_tracking: Option<SlopeTrackingConfig>,
-    // Logging.
-    pub(crate) main_trace_path: Option<PathBuf>,
-    pub(crate) linsearch_trace_path: Option<PathBuf>,
     // When gradient from one step to the next happen to be equal,
     // the hessian approximation breaks with a division by zero.
     // In this situation, consider that the search is over
     // if the step norm was shorter than this threshold value.
     pub(crate) small_step: f64,
-    // Paths for recording optimisation details.
+    // Logging.
+    pub(crate) main_trace_path: Option<PathBuf>,
+    pub(crate) linsearch_trace_path: Option<PathBuf>,
+    pub(crate) variable_names: Option<Vec<String>>,
 }
 
 #[derive(Debug)]
@@ -90,15 +90,12 @@ struct BfgsSearch<'c, F: Fn(&Tensor) -> Tensor> {
     lin_file: Option<File>,  // Linear search on every step.
 }
 
-impl Optim for Config {
-    type Error = Error;
-    type Result = BfgsResult;
-    fn minimize(
+impl Config {
+    pub fn minimize(
         &self,
         fun: impl Fn(&Tensor) -> Tensor,
         init: &Tensor,
-        log: u64,
-    ) -> Result<Self::Result, OptimError> {
+    ) -> Result<BfgsResult, OptimError> {
         let n_parms = init.size1().unwrap_or_else(|e| {
             panic!(
                 "Parameters must be single-dimensional. Received dimensions {:?} instead:\n{e}",
@@ -141,7 +138,6 @@ impl Optim for Config {
             Ok(if let Some(path) = path {
                 Some(File::create(path).context(TraceErr { path })?)
             } else {
-                ensure!(log == 0, NoTracePathErr { log });
                 None
             })
         };
@@ -166,27 +162,31 @@ impl Optim for Config {
             best,
         };
 
-        Ok(search.run_search(log)?)
+        Ok(search.run_search()?)
     }
 }
 
 impl<F: Fn(&Tensor) -> Tensor> BfgsSearch<'_, F> {
-    fn run_search(&mut self, log: u64) -> Result<BfgsResult, Error> {
+    fn run_search(&mut self) -> Result<BfgsResult, Error> {
         let cf = self.cf;
         if cf.max_iter == 0 {
             println!("No optimisation step asked for.");
             return Ok(self.result());
         }
 
-        if log > 0 {
+        let log = cf.main_trace_path.is_some();
+        if log {
             self.write_header()?;
-            wolfe_search::write_header(self.lin_file.as_mut().unwrap(), self.n_vars)?;
+            if cf.linsearch_trace_path.is_some() {
+                wolfe_search::write_header(
+                    self.lin_file.as_mut().unwrap(),
+                    self.cf.variable_names.as_ref().ok_or(self.n_vars),
+                )?;
+            }
             self.log(0.)?; // Fake initial zero step.
         }
 
         loop {
-            let log_this_one = log > 0 && self.n_steps % log == 0;
-
             // Pick a search direction.
             self.dir = (-&self.hess).mv(&self.grad);
 
@@ -195,11 +195,7 @@ impl<F: Fn(&Tensor) -> Tensor> BfgsSearch<'_, F> {
                 Location::new(&self.fun, &self.dir, &self.vars, self.loss, &self.grad),
                 &self.cf.wolfe,
                 &mut self.best,
-                (
-                    if log_this_one { log } else { 0 },
-                    self.lin_file.as_mut(),
-                    self.n_steps,
-                ),
+                (self.lin_file.as_mut(), self.n_steps),
             )?;
             self.n_eval += n_eval;
             self.n_diff += n_diff;
@@ -260,7 +256,7 @@ impl<F: Fn(&Tensor) -> Tensor> BfgsSearch<'_, F> {
             self.loss = loss_after_step;
             self.grad = grad_after_step;
 
-            if log_this_one {
+            if log {
                 self.log(step_size)?;
             }
 
@@ -288,9 +284,15 @@ impl<F: Fn(&Tensor) -> Tensor> BfgsSearch<'_, F> {
             .into_iter()
             .map(ToString::to_string)
             .collect::<Vec<_>>();
-        for vec in ["vars", "grad", "dir"] {
-            for i in 0..self.n_vars {
-                header.push(format!("{vec}_{i}"));
+        for vec in ["var", "grad", "dir"] {
+            if let Some(varnames) = &self.cf.variable_names {
+                for name in varnames {
+                    header.push(format!("{vec}_{name}"))
+                }
+            } else {
+                for i in 0..self.n_vars {
+                    header.push(format!("{vec}_{i}"));
+                }
             }
         }
         let file = self.main_file.as_mut().unwrap();
@@ -337,9 +339,4 @@ pub enum Error {
     WolfeSearch { source: WolfeSearchError },
     #[snafu(display("Error writing to trace file {:?}:\n{source}", path.display()))]
     Trace { path: PathBuf, source: io::Error },
-    #[snafu(display(
-        "A log level of {log} is required, \
-         but no file path has been choosen to record traces."
-    ))]
-    NoTracePath { log: u64 },
 }
diff --git a/src/optim/gd.rs b/src/optim/gd.rs
index 79e31aa..53b1101 100644
--- a/src/optim/gd.rs
+++ b/src/optim/gd.rs
@@ -9,8 +9,8 @@ use snafu::Snafu;
 use tch::Tensor;
 
 use crate::optim::{
-    simple_optim_result_impl, Best, Error as OptimError, Loggable, Optim, OptimTensor,
-    SlopeTracker, SlopeTrackingConfig,
+    simple_optim_result_impl, Best, Error as OptimError, Loggable, OptimTensor, SlopeTracker,
+    SlopeTrackingConfig,
 };
 
 #[derive(Debug, Serialize)]
@@ -29,15 +29,13 @@ pub(crate) struct GdResult {
 }
 simple_optim_result_impl!(GdResult);
 
-impl Optim for Config {
-    type Error = Error;
-    type Result = GdResult;
-    fn minimize(
+impl Config {
+    pub fn minimize(
         &self,
         fun: impl Fn(&Tensor) -> Tensor,
         init: &Tensor,
         log: u64,
-    ) -> Result<Self::Result, OptimError> {
+    ) -> Result<GdResult, OptimError> {
         let &Self { max_iter, step_size, ref slope_tracking } = self;
         let kindev = (init.kind(), init.device());
         let mut n_eval = 0;
diff --git a/src/optim/wolfe_search.rs b/src/optim/wolfe_search.rs
index f8657ca..104a50c 100644
--- a/src/optim/wolfe_search.rs
+++ b/src/optim/wolfe_search.rs
@@ -69,9 +69,6 @@ pub(crate) struct Search<'c, 's, F: Fn(&Tensor) -> Tensor> {
     // Borrow situation from the calling search step.
     loc: Location<'s, F>,
 
-    // Log level.
-    n_log: u64,
-
     // Evolvable search state.
     best: &'s mut Best, // Keep track of the best loss found so far.
 
@@ -87,11 +84,11 @@ pub(crate) struct Search<'c, 's, F: Fn(&Tensor) -> Tensor> {
 
 // Starting point of the linear search, supposed immutable in this context.
 pub(crate) struct Location<'s, F: Fn(&Tensor) -> Tensor> {
-    pub(crate) f: &'s F,         // Objective function.
-    pub(crate) p: &'s Tensor,    // Search direction.
-    pub(crate) x: &'s Tensor,    // Search starting point.
-    pub(crate) y: f64,           // Search starting loss value = phi(0).
-    pub(crate) phigrad: f64,     // Derivative = phi'(0) = p . ∇f(x).
+    pub(crate) f: &'s F,      // Objective function.
+    pub(crate) p: &'s Tensor, // Search direction.
+    pub(crate) x: &'s Tensor, // Search starting point.
+    pub(crate) y: f64,        // Search starting loss value = phi(0).
+    pub(crate) phigrad: f64,  // Derivative = phi'(0) = p . ∇f(x).
 }
 
 // Pack search variables in various types depending on the search phase.
@@ -466,10 +463,9 @@ impl<'c, 'binsearch, F: Fn(&Tensor) -> Tensor> Search<'c, 'binsearch, F> {
         x: &Tensor,            // short for xs = x + alpha * p
         grad: Option<&Tensor>, // = ∇f(xs)
     ) -> Result<(), Error> {
-        if self.n_log == 0 || self.id % self.n_log > 0 {
+        let Some(file) = self.trace.as_mut() else {
             return Ok(());
-        }
-        let file = self.trace.as_mut().unwrap();
+        };
         macro_rules! w {
             ($fmt:literal, $value:expr) => {
                 write!(file, $fmt, $value)?;
@@ -502,14 +498,26 @@ impl<'c, 'binsearch, F: Fn(&Tensor) -> Tensor> Search<'c, 'binsearch, F> {
     }
 }
 
-pub(crate) fn write_header(file: &mut File, n_vars: usize) -> Result<(), Error> {
+pub(crate) fn write_header(
+    file: &mut File,
+    varnames: Result<&Vec<String>, usize>, // Fall back to the variable count when names are unavailable.
+) -> Result<(), Error> {
     let mut header = ["id", "alpha", "loss", "phigrad"]
         .into_iter()
         .map(ToString::to_string)
         .collect::<Vec<_>>();
-    for vec in ["vars", "grad"] {
-        for i in 0..n_vars {
-            header.push(format!("{vec}_{i}"));
+    for vec in ["var", "grad"] {
+        match varnames {
+            Ok(names) => {
+                for name in names {
+                    header.push(format!("{vec}_{name}"));
+                }
+            }
+            Err(n_vars) => {
+                for i in 0..n_vars {
+                    header.push(format!("{vec}_{i}"));
+                }
+            }
         }
     }
     writeln!(file, "{}", header.join(","))?;
@@ -526,14 +534,13 @@ pub(crate) fn search<'s, F: Fn(&Tensor) -> Tensor>(
     loc: Location<'s, F>,
     cf: &Config,
     best: &'s mut Best,
-    (n_log, trace, id): (u64, Option<&'s mut File>, u64),
+    (trace, id): (Option<&'s mut File>, u64),
 ) -> Result<Summary, Error> {
     use Error as E;
     // Initialize.
     let search = Search {
         n_vars: loc.p.size1().unwrap().try_into().unwrap(),
         loc,
-        n_log,
         best,
         cf,
         n_eval: 0,
-- 
GitLab