Merge potential next release v0.4 (#187)

Breaking Changes

* First draft of the new n-dimensional arrays + NB use case
* Improves default implementation of multiple Array methods
* Refactors tree methods
* Adds matrix decomposition routines
* Adds matrix decomposition methods to ndarray and nalgebra bindings
* Refactoring + linear regression now uses array2
* Ridge & Linear regression
* LBFGS optimizer & logistic regression
* Changes linear methods, metrics and model selection methods to new n-dimensional arrays
* Switches KNN and clustering algorithms to new n-d array layer
* Refactors distance metrics
* Optimizes knn and clustering methods
* Refactors metrics module
* Switches decomposition methods to n-dimensional arrays
* Linalg refactoring - cleanup rng merge (#172)
* Remove legacy DenseMatrix and BaseMatrix implementation. Port the new Number, FloatNumber and Array implementation into the module structure.
* Exclude AUC metrics. Needs reimplementation
* Improve developers walkthrough

New traits system in place at `src/numbers` and `src/linalg`

Co-authored-by: Lorenzo <tunedconsulting@gmail.com>

* Provide SupervisedEstimator with a constructor to avoid explicit dynamic box allocation in 'cross_validate' and 'cross_validate_predict', as required by the use of 'dyn' in Rust 2021
* Implement getters to use as_ref() in src/neighbors
* Implement getters to use as_ref() in src/naive_bayes
* Implement getters to use as_ref() in src/linear
* Add Clone to src/naive_bayes
* Change the signature of cross_validate and other model_selection functions to comply with the use of dyn in Rust 2021
* Implement ndarray-bindings. Remove FloatNumber from implementations
* Drop nalgebra-bindings support (as decided in the conf call to go with ndarray)
* Remove benches. Benches will have their own repo at smartcore-benches
* Implement SVC
* Implement SVC serialization. Move search parameters into a dedicated module
* Implement SVR. Definitely too slow
* Fix compilation issues for wasm (#202)

Co-authored-by: Luis Moreno <morenol@users.noreply.github.com>

* Fix tests (#203)
* Port linalg/traits/stats.rs
* Improve method naming
* Improve Display for DenseMatrix

Co-authored-by: Montana Low <montanalow@users.noreply.github.com>
Co-authored-by: VolodymyrOrlov <volodymyr.orlov@gmail.com>
36 additions, 25 deletions
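The diff below shows the metrics-module side of this migration: metric structs are now generic over the number type, constructed through the `Metrics` trait with `new()` instead of a struct literal, and `get_score` accepts anything implementing `ArrayView1` (including a plain `Vec<f64>`). A minimal usage sketch of that call pattern; the `smartcore::metrics::r2::R2` path is an assumption (it is not shown in this diff), while the other imports appear verbatim in the doc example below.

```rust
// Illustrative sketch of the post-migration metrics API, mirroring the doc
// example and test in the diff below. The path `smartcore::metrics::r2::R2`
// is an assumption; the other imports appear verbatim in the diff.
use smartcore::metrics::mean_absolute_error::MeanAbsoluteError;
use smartcore::metrics::r2::R2;
use smartcore::metrics::Metrics;

fn main() {
    let y_true: Vec<f64> = vec![3., -0.5, 2., 7.];
    let y_pred: Vec<f64> = vec![2.5, 0.0, 2., 8.];

    // Old API: `R2 {}.get_score(...)`. New API: construct via the Metrics trait.
    let r2: f64 = R2::new().get_score(&y_true, &y_pred);
    let mae: f64 = MeanAbsoluteError::new().get_score(&y_true, &y_pred);
    println!("r2 = {:.9}, mae = {:.3}", r2, mae);
}
```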
@@ -10,59 +10,70 @@
 //!
 //! ```
 //! use smartcore::metrics::mean_absolute_error::MeanAbsoluteError;
+//! use smartcore::metrics::Metrics;
 //! let y_pred: Vec<f64> = vec![3., -0.5, 2., 7.];
 //! let y_true: Vec<f64> = vec![2.5, 0.0, 2., 8.];
 //!
-//! let mse: f64 = MeanAbsoluteError {}.get_score(&y_pred, &y_true);
+//! let mse: f64 = MeanAbsoluteError::new().get_score(&y_pred, &y_true);
 //! ```
 //!
 //! <script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
 //! <script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
+use std::marker::PhantomData;
 
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
 
-use crate::linalg::BaseVector;
-use crate::math::num::RealNumber;
+use crate::linalg::basic::arrays::ArrayView1;
+use crate::numbers::basenum::Number;
+
+use crate::metrics::Metrics;
 
 /// Coefficient of Determination (R2)
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[derive(Debug)]
-pub struct R2 {}
+pub struct R2<T> {
+    _phantom: PhantomData<T>,
+}
 
-impl R2 {
+impl<T: Number> Metrics<T> for R2<T> {
+    /// create a typed object to call R2 functions
+    fn new() -> Self {
+        Self {
+            _phantom: PhantomData,
+        }
+    }
+    fn new_with(_parameter: f64) -> Self {
+        Self {
+            _phantom: PhantomData,
+        }
+    }
     /// Computes R2 score
     /// * `y_true` - Ground truth (correct) target values.
     /// * `y_pred` - Estimated target values.
-    pub fn get_score<T: RealNumber, V: BaseVector<T>>(&self, y_true: &V, y_pred: &V) -> T {
-        if y_true.len() != y_pred.len() {
+    fn get_score(&self, y_true: &dyn ArrayView1<T>, y_pred: &dyn ArrayView1<T>) -> f64 {
+        if y_true.shape() != y_pred.shape() {
             panic!(
                 "The vector sizes don't match: {} != {}",
-                y_true.len(),
-                y_pred.len()
+                y_true.shape(),
+                y_pred.shape()
             );
         }
 
-        let n = y_true.len();
-
-        let mut mean = T::zero();
-
-        for i in 0..n {
-            mean += y_true.get(i);
-        }
-
-        mean /= T::from_usize(n).unwrap();
+        let n = y_true.shape();
 
+        let mean: f64 = y_true.mean_by();
         let mut ss_tot = T::zero();
         let mut ss_res = T::zero();
 
         for i in 0..n {
-            let y_i = y_true.get(i);
-            let f_i = y_pred.get(i);
-            ss_tot += (y_i - mean).square();
-            ss_res += (y_i - f_i).square();
+            let y_i = *y_true.get(i);
+            let f_i = *y_pred.get(i);
+            ss_tot += (y_i - T::from(mean).unwrap()) * (y_i - T::from(mean).unwrap());
+            ss_res += (y_i - f_i) * (y_i - f_i);
         }
 
-        T::one() - (ss_res / ss_tot)
+        (T::one() - ss_res / ss_tot).to_f64().unwrap()
     }
 }
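A note on the hunk above: the new `R2<T>` stores no values of type `T`; its `PhantomData<T>` field exists only so the struct can be generic over the number type required by `Metrics<T>`. A self-contained illustration of that pattern (plain Rust, not smartcore code; the `Metric` name is made up for this sketch):

```rust
use std::marker::PhantomData;

// A zero-sized struct that is generic over T without storing a T.
// PhantomData<T> satisfies the compiler's "unused type parameter" check.
struct Metric<T> {
    _phantom: PhantomData<T>,
}

impl<T> Metric<T> {
    fn new() -> Self {
        Self {
            _phantom: PhantomData,
        }
    }
}

fn main() {
    // The concrete number type is picked at the call site.
    let _r2_like: Metric<f64> = Metric::new();
}
```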
@@ -76,8 +87,8 @@ mod tests {
         let y_true: Vec<f64> = vec![3., -0.5, 2., 7.];
         let y_pred: Vec<f64> = vec![2.5, 0.0, 2., 8.];
 
-        let score1: f64 = R2 {}.get_score(&y_true, &y_pred);
-        let score2: f64 = R2 {}.get_score(&y_true, &y_true);
+        let score1: f64 = R2::new().get_score(&y_true, &y_pred);
+        let score2: f64 = R2::new().get_score(&y_true, &y_true);
 
         assert!((score1 - 0.948608137).abs() < 1e-8);
         assert!((score2 - 1.0).abs() < 1e-8);
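As a sanity check on the expected values in the test hunk above: with `y_true = [3, -0.5, 2, 7]` and `y_pred = [2.5, 0, 2, 8]`, the mean of `y_true` is 2.875, `ss_tot = 29.1875`, `ss_res = 1.5`, and `1 - 1.5 / 29.1875 ≈ 0.948608137`, which matches the asserted constant. A standalone, hand-rolled sketch of the same computation (not the smartcore implementation):

```rust
// Plain-Rust reimplementation of R2 = 1 - ss_res / ss_tot, used only to
// reproduce the constants asserted in the test above.
fn r2(y_true: &[f64], y_pred: &[f64]) -> f64 {
    let mean = y_true.iter().sum::<f64>() / y_true.len() as f64;
    let ss_tot: f64 = y_true.iter().map(|y| (y - mean).powi(2)).sum();
    let ss_res: f64 = y_true
        .iter()
        .zip(y_pred)
        .map(|(y, f)| (y - f).powi(2))
        .sum();
    1.0 - ss_res / ss_tot
}

fn main() {
    let y_true = [3., -0.5, 2., 7.];
    let y_pred = [2.5, 0.0, 2., 8.];
    // mean = 2.875, ss_tot = 29.1875, ss_res = 1.5 -> R2 ≈ 0.948608137
    assert!((r2(&y_true, &y_pred) - 0.948608137).abs() < 1e-8);
    assert!((r2(&y_true, &y_true) - 1.0).abs() < 1e-8);
    println!("ok");
}
```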