fix: cargo fmt

Volodymyr Orlov
2020-06-05 17:52:03 -07:00
parent 685be04488
commit a2784d6345
52 changed files with 3342 additions and 2829 deletions
+1 -1
@@ -1,2 +1,2 @@
 pub mod random_forest_classifier;
-pub mod random_forest_regressor;
+pub mod random_forest_regressor;
+84 -69
@@ -4,43 +4,44 @@ use std::default::Default;
 use std::fmt::Debug;
 
 use rand::Rng;
-use serde::{Serialize, Deserialize};
+use serde::{Deserialize, Serialize};
 
-use crate::math::num::FloatExt;
 use crate::linalg::Matrix;
-use crate::tree::decision_tree_classifier::{DecisionTreeClassifier, DecisionTreeClassifierParameters, SplitCriterion, which_max};
+use crate::math::num::FloatExt;
+use crate::tree::decision_tree_classifier::{
+    which_max, DecisionTreeClassifier, DecisionTreeClassifierParameters, SplitCriterion,
+};
 
 #[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct RandomForestClassifierParameters {
-    pub criterion: SplitCriterion,
+pub struct RandomForestClassifierParameters {
+    pub criterion: SplitCriterion,
     pub max_depth: Option<u16>,
-    pub min_samples_leaf: usize,
-    pub min_samples_split: usize,
-    pub n_trees: u16,
-    pub mtry: Option<usize>
+    pub min_samples_leaf: usize,
+    pub min_samples_split: usize,
+    pub n_trees: u16,
+    pub mtry: Option<usize>,
 }
 
 #[derive(Serialize, Deserialize, Debug)]
-pub struct RandomForestClassifier<T: FloatExt> {
+pub struct RandomForestClassifier<T: FloatExt> {
     parameters: RandomForestClassifierParameters,
     trees: Vec<DecisionTreeClassifier<T>>,
-    classes: Vec<T>
+    classes: Vec<T>,
 }
 
-impl<T: FloatExt> PartialEq for RandomForestClassifier<T> {
+impl<T: FloatExt> PartialEq for RandomForestClassifier<T> {
     fn eq(&self, other: &Self) -> bool {
-        if self.classes.len() != other.classes.len() ||
-            self.trees.len() != other.trees.len() {
-            return false
+        if self.classes.len() != other.classes.len() || self.trees.len() != other.trees.len() {
+            return false;
         } else {
             for i in 0..self.classes.len() {
                 if (self.classes[i] - other.classes[i]).abs() > T::epsilon() {
-                    return false
+                    return false;
                 }
             }
             for i in 0..self.trees.len() {
                 if self.trees[i] != other.trees[i] {
-                    return false
+                    return false;
                 }
             }
             true
@@ -49,45 +50,54 @@ impl<T: FloatExt> PartialEq for RandomForestClassifier<T> {
 }
 
 impl Default for RandomForestClassifierParameters {
-    fn default() -> Self {
+    fn default() -> Self {
         RandomForestClassifierParameters {
             criterion: SplitCriterion::Gini,
             max_depth: None,
             min_samples_leaf: 1,
             min_samples_split: 2,
             n_trees: 100,
-            mtry: Option::None
+            mtry: Option::None,
         }
-    }
+    }
 }
 
 impl<T: FloatExt> RandomForestClassifier<T> {
-    pub fn fit<M: Matrix<T>>(x: &M, y: &M::RowVector, parameters: RandomForestClassifierParameters) -> RandomForestClassifier<T> {
+    pub fn fit<M: Matrix<T>>(
+        x: &M,
+        y: &M::RowVector,
+        parameters: RandomForestClassifierParameters,
+    ) -> RandomForestClassifier<T> {
         let (_, num_attributes) = x.shape();
         let y_m = M::from_row_vector(y.clone());
         let (_, y_ncols) = y_m.shape();
         let mut yi: Vec<usize> = vec![0; y_ncols];
-        let classes = y_m.unique();
+        let classes = y_m.unique();
         for i in 0..y_ncols {
-            let yc = y_m.get(0, i);
-            yi[i] = classes.iter().position(|c| yc == *c).unwrap();
+            let yc = y_m.get(0, i);
+            yi[i] = classes.iter().position(|c| yc == *c).unwrap();
         }
-        let mtry = parameters.mtry.unwrap_or((T::from(num_attributes).unwrap()).sqrt().floor().to_usize().unwrap());
-        let classes = y_m.unique();
-        let k = classes.len();
+        let mtry = parameters.mtry.unwrap_or(
+            (T::from(num_attributes).unwrap())
+                .sqrt()
+                .floor()
+                .to_usize()
+                .unwrap(),
+        );
+        let classes = y_m.unique();
+        let k = classes.len();
         let mut trees: Vec<DecisionTreeClassifier<T>> = Vec::new();
         for _ in 0..parameters.n_trees {
             let samples = RandomForestClassifier::<T>::sample_with_replacement(&yi, k);
-            let params = DecisionTreeClassifierParameters{
+            let params = DecisionTreeClassifierParameters {
                 criterion: parameters.criterion.clone(),
                 max_depth: parameters.max_depth,
-                min_samples_leaf: parameters.min_samples_leaf,
-                min_samples_split: parameters.min_samples_split
+                min_samples_leaf: parameters.min_samples_leaf,
+                min_samples_split: parameters.min_samples_split,
             };
             let tree = DecisionTreeClassifier::fit_weak_learner(x, y, samples, mtry, params);
             trees.push(tree);
@@ -96,13 +106,13 @@ impl<T: FloatExt> RandomForestClassifier<T> {
         RandomForestClassifier {
             parameters: parameters,
             trees: trees,
-            classes
+            classes,
         }
     }
 
     pub fn predict<M: Matrix<T>>(&self, x: &M) -> M::RowVector {
-        let mut result = M::zeros(1, x.shape().0);
+        let mut result = M::zeros(1, x.shape().0);
         let (n, _) = x.shape();
         for i in 0..n {
@@ -110,20 +120,19 @@ impl<T: FloatExt> RandomForestClassifier<T> {
         }
         result.to_row_vector()
-    }
+    }
 
     fn predict_for_row<M: Matrix<T>>(&self, x: &M, row: usize) -> usize {
         let mut result = vec![0; self.classes.len()];
         for tree in self.trees.iter() {
             result[tree.predict_for_row(x, row)] += 1;
-        }
+        }
-        return which_max(&result)
-    }
-    fn sample_with_replacement(y: &Vec<usize>, num_classes: usize) -> Vec<usize>{
+        return which_max(&result);
+    }
+    fn sample_with_replacement(y: &Vec<usize>, num_classes: usize) -> Vec<usize> {
         let mut rng = rand::thread_rng();
         let class_weight = vec![1.; num_classes];
         let nrows = y.len();
@@ -137,8 +146,8 @@ impl<T: FloatExt> RandomForestClassifier<T> {
                     nj += 1;
                 }
             }
-            let size = ((nj as f64) / class_weight[l]) as usize;
+            let size = ((nj as f64) / class_weight[l]) as usize;
             for _ in 0..size {
                 let xi: usize = rng.gen_range(0, nj);
                 samples[cj[xi]] += 1;
@@ -146,17 +155,15 @@ impl<T: FloatExt> RandomForestClassifier<T> {
         }
         samples
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use super::*;
+    use super::*;
     use crate::linalg::naive::dense_matrix::DenseMatrix;
 
     #[test]
-    fn fit_predict_iris() {
+    fn fit_predict_iris() {
         let x = DenseMatrix::from_array(&[
             &[5.1, 3.5, 1.4, 0.2],
             &[4.9, 3.0, 1.4, 0.2],
@@ -177,24 +184,30 @@ mod tests {
             &[6.3, 3.3, 4.7, 1.6],
             &[4.9, 2.4, 3.3, 1.0],
             &[6.6, 2.9, 4.6, 1.3],
-            &[5.2, 2.7, 3.9, 1.4]]);
-        let y = vec![0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.];
+            &[5.2, 2.7, 3.9, 1.4],
+        ]);
+        let y = vec![
+            0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
+        ];
-        let classifier = RandomForestClassifier::fit(&x, &y, RandomForestClassifierParameters{
-            criterion: SplitCriterion::Gini,
-            max_depth: None,
-            min_samples_leaf: 1,
-            min_samples_split: 2,
-            n_trees: 1000,
-            mtry: Option::None
-        });
+        let classifier = RandomForestClassifier::fit(
+            &x,
+            &y,
+            RandomForestClassifierParameters {
+                criterion: SplitCriterion::Gini,
+                max_depth: None,
+                min_samples_leaf: 1,
+                min_samples_split: 2,
+                n_trees: 1000,
+                mtry: Option::None,
+            },
+        );
-        assert_eq!(y, classifier.predict(&x));
+        assert_eq!(y, classifier.predict(&x));
     }
 
     #[test]
-    fn serde() {
+    fn serde() {
         let x = DenseMatrix::from_array(&[
             &[5.1, 3.5, 1.4, 0.2],
             &[4.9, 3.0, 1.4, 0.2],
@@ -215,15 +228,17 @@ mod tests {
             &[6.3, 3.3, 4.7, 1.6],
             &[4.9, 2.4, 3.3, 1.0],
             &[6.6, 2.9, 4.6, 1.3],
-            &[5.2, 2.7, 3.9, 1.4]]);
-        let y = vec![0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.];
+            &[5.2, 2.7, 3.9, 1.4],
+        ]);
+        let y = vec![
+            0., 0., 0., 0., 0., 0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,
+        ];
         let forest = RandomForestClassifier::fit(&x, &y, Default::default());
-        let deserialized_forest: RandomForestClassifier<f64> = bincode::deserialize(&bincode::serialize(&forest).unwrap()).unwrap();
+        let deserialized_forest: RandomForestClassifier<f64> =
+            bincode::deserialize(&bincode::serialize(&forest).unwrap()).unwrap();
-        assert_eq!(forest, deserialized_forest);
+        assert_eq!(forest, deserialized_forest);
-    }
+    }
 }
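
For orientation, the classifier being reformatted above is used roughly as follows. This is a minimal sketch, not code from this commit: it assumes the crate is smartcore and that the module path is smartcore::ensemble::random_forest_classifier (the mod.rs hunk above shows only the module declarations, not their parent path), and the data abbreviates the iris fixture from the tests.

use smartcore::ensemble::random_forest_classifier::RandomForestClassifier;
use smartcore::linalg::naive::dense_matrix::DenseMatrix;

fn main() {
    // Four samples, four features, two classes (subset of the iris test data above).
    let x = DenseMatrix::from_array(&[
        &[5.1, 3.5, 1.4, 0.2],
        &[4.9, 3.0, 1.4, 0.2],
        &[6.3, 3.3, 4.7, 1.6],
        &[6.6, 2.9, 4.6, 1.3],
    ]);
    let y = vec![0., 0., 1., 1.];

    // Default::default() supplies the values from the Default impl in this file:
    // Gini criterion, 100 trees, mtry = floor(sqrt(num_attributes)).
    let forest = RandomForestClassifier::fit(&x, &y, Default::default());

    // predict returns one label per row; on a tiny separable set like this the
    // forest recovers the training labels, as the fit_predict_iris test asserts.
    let y_hat = forest.predict(&x);
    assert_eq!(y, y_hat);
}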
+138 -111
@@ -4,47 +4,49 @@ use std::default::Default;
 use std::fmt::Debug;
 
 use rand::Rng;
-use serde::{Serialize, Deserialize};
+use serde::{Deserialize, Serialize};
 
-use crate::math::num::FloatExt;
 use crate::linalg::Matrix;
-use crate::tree::decision_tree_regressor::{DecisionTreeRegressor, DecisionTreeRegressorParameters};
+use crate::math::num::FloatExt;
+use crate::tree::decision_tree_regressor::{
+    DecisionTreeRegressor, DecisionTreeRegressorParameters,
+};
 
 #[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct RandomForestRegressorParameters {
+pub struct RandomForestRegressorParameters {
     pub max_depth: Option<u16>,
-    pub min_samples_leaf: usize,
-    pub min_samples_split: usize,
-    pub n_trees: usize,
-    pub mtry: Option<usize>
+    pub min_samples_leaf: usize,
+    pub min_samples_split: usize,
+    pub n_trees: usize,
+    pub mtry: Option<usize>,
 }
 
 #[derive(Serialize, Deserialize, Debug)]
-pub struct RandomForestRegressor<T: FloatExt> {
+pub struct RandomForestRegressor<T: FloatExt> {
     parameters: RandomForestRegressorParameters,
-    trees: Vec<DecisionTreeRegressor<T>>
+    trees: Vec<DecisionTreeRegressor<T>>,
 }
 
 impl Default for RandomForestRegressorParameters {
-    fn default() -> Self {
-        RandomForestRegressorParameters {
+    fn default() -> Self {
+        RandomForestRegressorParameters {
             max_depth: None,
             min_samples_leaf: 1,
             min_samples_split: 2,
             n_trees: 10,
-            mtry: Option::None
+            mtry: Option::None,
         }
-    }
+    }
 }
 
-impl<T: FloatExt> PartialEq for RandomForestRegressor<T> {
+impl<T: FloatExt> PartialEq for RandomForestRegressor<T> {
     fn eq(&self, other: &Self) -> bool {
         if self.trees.len() != other.trees.len() {
-            return false
+            return false;
         } else {
             for i in 0..self.trees.len() {
                 if self.trees[i] != other.trees[i] {
-                    return false
+                    return false;
                 }
             }
             true
@@ -53,20 +55,25 @@ impl<T: FloatExt> PartialEq for RandomForestRegressor<T> {
 }
 
 impl<T: FloatExt> RandomForestRegressor<T> {
-    pub fn fit<M: Matrix<T>>(x: &M, y: &M::RowVector, parameters: RandomForestRegressorParameters) -> RandomForestRegressor<T> {
-        let (n_rows, num_attributes) = x.shape();
-        let mtry = parameters.mtry.unwrap_or((num_attributes as f64).sqrt().floor() as usize);
+    pub fn fit<M: Matrix<T>>(
+        x: &M,
+        y: &M::RowVector,
+        parameters: RandomForestRegressorParameters,
+    ) -> RandomForestRegressor<T> {
+        let (n_rows, num_attributes) = x.shape();
+        let mtry = parameters
+            .mtry
+            .unwrap_or((num_attributes as f64).sqrt().floor() as usize);
         let mut trees: Vec<DecisionTreeRegressor<T>> = Vec::new();
         for _ in 0..parameters.n_trees {
             let samples = RandomForestRegressor::<T>::sample_with_replacement(n_rows);
-            let params = DecisionTreeRegressorParameters{
+            let params = DecisionTreeRegressorParameters {
                 max_depth: parameters.max_depth,
-                min_samples_leaf: parameters.min_samples_leaf,
-                min_samples_split: parameters.min_samples_split
+                min_samples_leaf: parameters.min_samples_leaf,
+                min_samples_split: parameters.min_samples_split,
             };
             let tree = DecisionTreeRegressor::fit_weak_learner(x, y, samples, mtry, params);
             trees.push(tree);
@@ -74,13 +81,13 @@ impl<T: FloatExt> RandomForestRegressor<T> {
         RandomForestRegressor {
             parameters: parameters,
-            trees: trees
+            trees: trees,
         }
     }
 
     pub fn predict<M: Matrix<T>>(&self, x: &M) -> M::RowVector {
-        let mut result = M::zeros(1, x.shape().0);
+        let mut result = M::zeros(1, x.shape().0);
         let (n, _) = x.shape();
         for i in 0..n {
@@ -88,23 +95,21 @@ impl<T: FloatExt> RandomForestRegressor<T> {
         }
         result.to_row_vector()
-    }
+    }
 
-    fn predict_for_row<M: Matrix<T>>(&self, x: &M, row: usize) -> T {
+    fn predict_for_row<M: Matrix<T>>(&self, x: &M, row: usize) -> T {
         let n_trees = self.trees.len();
         let mut result = T::zero();
         for tree in self.trees.iter() {
             result = result + tree.predict_for_row(x, row);
-        }
+        }
         result / T::from(n_trees).unwrap()
-    }
-    fn sample_with_replacement(nrows: usize) -> Vec<usize>{
+    }
+    fn sample_with_replacement(nrows: usize) -> Vec<usize> {
         let mut rng = rand::thread_rng();
         let mut samples = vec![0; nrows];
         for _ in 0..nrows {
@@ -113,116 +118,138 @@ impl<T: FloatExt> RandomForestRegressor<T> {
         }
         samples
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use super::*;
+    use super::*;
     use crate::linalg::naive::dense_matrix::DenseMatrix;
-    use ndarray::{arr1, arr2};
+    use ndarray::{arr1, arr2};
 
     #[test]
-    fn fit_longley() {
+    fn fit_longley() {
         let x = DenseMatrix::from_array(&[
-            &[ 234.289, 235.6, 159., 107.608, 1947., 60.323],
-            &[ 259.426, 232.5, 145.6, 108.632, 1948., 61.122],
-            &[ 258.054, 368.2, 161.6, 109.773, 1949., 60.171],
-            &[ 284.599, 335.1, 165., 110.929, 1950., 61.187],
-            &[ 328.975, 209.9, 309.9, 112.075, 1951., 63.221],
-            &[ 346.999, 193.2, 359.4, 113.27 , 1952., 63.639],
-            &[ 365.385, 187., 354.7, 115.094, 1953., 64.989],
-            &[ 363.112, 357.8, 335., 116.219, 1954., 63.761],
-            &[ 397.469, 290.4, 304.8, 117.388, 1955., 66.019],
-            &[ 419.18 , 282.2, 285.7, 118.734, 1956., 67.857],
-            &[ 442.769, 293.6, 279.8, 120.445, 1957., 68.169],
-            &[ 444.546, 468.1, 263.7, 121.95 , 1958., 66.513],
-            &[ 482.704, 381.3, 255.2, 123.366, 1959., 68.655],
-            &[ 502.601, 393.1, 251.4, 125.368, 1960., 69.564],
-            &[ 518.173, 480.6, 257.2, 127.852, 1961., 69.331],
-            &[ 554.894, 400.7, 282.7, 130.081, 1962., 70.551]]);
-        let y = vec![83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9];
+            &[234.289, 235.6, 159., 107.608, 1947., 60.323],
+            &[259.426, 232.5, 145.6, 108.632, 1948., 61.122],
+            &[258.054, 368.2, 161.6, 109.773, 1949., 60.171],
+            &[284.599, 335.1, 165., 110.929, 1950., 61.187],
+            &[328.975, 209.9, 309.9, 112.075, 1951., 63.221],
+            &[346.999, 193.2, 359.4, 113.27, 1952., 63.639],
+            &[365.385, 187., 354.7, 115.094, 1953., 64.989],
+            &[363.112, 357.8, 335., 116.219, 1954., 63.761],
+            &[397.469, 290.4, 304.8, 117.388, 1955., 66.019],
+            &[419.18, 282.2, 285.7, 118.734, 1956., 67.857],
+            &[442.769, 293.6, 279.8, 120.445, 1957., 68.169],
+            &[444.546, 468.1, 263.7, 121.95, 1958., 66.513],
+            &[482.704, 381.3, 255.2, 123.366, 1959., 68.655],
+            &[502.601, 393.1, 251.4, 125.368, 1960., 69.564],
+            &[518.173, 480.6, 257.2, 127.852, 1961., 69.331],
+            &[554.894, 400.7, 282.7, 130.081, 1962., 70.551],
+        ]);
+        let y = vec![
+            83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6,
+            114.2, 115.7, 116.9,
+        ];
-        let expected_y: Vec<f64> = vec![85., 88., 88., 89., 97., 98., 99., 99., 102., 104., 109., 110., 113., 114., 115., 116.];
+        let expected_y: Vec<f64> = vec![
+            85., 88., 88., 89., 97., 98., 99., 99., 102., 104., 109., 110., 113., 114., 115., 116.,
+        ];
-        let y_hat = RandomForestRegressor::fit(&x, &y,
-            RandomForestRegressorParameters{max_depth: None,
+        let y_hat = RandomForestRegressor::fit(
+            &x,
+            &y,
+            RandomForestRegressorParameters {
+                max_depth: None,
                 min_samples_leaf: 1,
                 min_samples_split: 2,
                 n_trees: 1000,
-                mtry: Option::None}).predict(&x);
+                mtry: Option::None,
+            },
+        )
+        .predict(&x);
         for i in 0..y_hat.len() {
             assert!((y_hat[i] - expected_y[i]).abs() < 1.0);
         }
     }
 
     #[test]
-    fn my_fit_longley_ndarray() {
+    fn my_fit_longley_ndarray() {
         let x = arr2(&[
-            [ 234.289, 235.6, 159., 107.608, 1947., 60.323],
-            [ 259.426, 232.5, 145.6, 108.632, 1948., 61.122],
-            [ 258.054, 368.2, 161.6, 109.773, 1949., 60.171],
-            [ 284.599, 335.1, 165., 110.929, 1950., 61.187],
-            [ 328.975, 209.9, 309.9, 112.075, 1951., 63.221],
-            [ 346.999, 193.2, 359.4, 113.27 , 1952., 63.639],
-            [ 365.385, 187., 354.7, 115.094, 1953., 64.989],
-            [ 363.112, 357.8, 335., 116.219, 1954., 63.761],
-            [ 397.469, 290.4, 304.8, 117.388, 1955., 66.019],
-            [ 419.18 , 282.2, 285.7, 118.734, 1956., 67.857],
-            [ 442.769, 293.6, 279.8, 120.445, 1957., 68.169],
-            [ 444.546, 468.1, 263.7, 121.95 , 1958., 66.513],
-            [ 482.704, 381.3, 255.2, 123.366, 1959., 68.655],
-            [ 502.601, 393.1, 251.4, 125.368, 1960., 69.564],
-            [ 518.173, 480.6, 257.2, 127.852, 1961., 69.331],
-            [ 554.894, 400.7, 282.7, 130.081, 1962., 70.551]]);
-        let y = arr1(&[83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9]);
+            [234.289, 235.6, 159., 107.608, 1947., 60.323],
+            [259.426, 232.5, 145.6, 108.632, 1948., 61.122],
+            [258.054, 368.2, 161.6, 109.773, 1949., 60.171],
+            [284.599, 335.1, 165., 110.929, 1950., 61.187],
+            [328.975, 209.9, 309.9, 112.075, 1951., 63.221],
+            [346.999, 193.2, 359.4, 113.27, 1952., 63.639],
+            [365.385, 187., 354.7, 115.094, 1953., 64.989],
+            [363.112, 357.8, 335., 116.219, 1954., 63.761],
+            [397.469, 290.4, 304.8, 117.388, 1955., 66.019],
+            [419.18, 282.2, 285.7, 118.734, 1956., 67.857],
+            [442.769, 293.6, 279.8, 120.445, 1957., 68.169],
+            [444.546, 468.1, 263.7, 121.95, 1958., 66.513],
+            [482.704, 381.3, 255.2, 123.366, 1959., 68.655],
+            [502.601, 393.1, 251.4, 125.368, 1960., 69.564],
+            [518.173, 480.6, 257.2, 127.852, 1961., 69.331],
+            [554.894, 400.7, 282.7, 130.081, 1962., 70.551],
+        ]);
+        let y = arr1(&[
+            83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6,
+            114.2, 115.7, 116.9,
+        ]);
-        let expected_y: Vec<f64> = vec![85., 88., 88., 89., 97., 98., 99., 99., 102., 104., 109., 110., 113., 114., 115., 116.];
+        let expected_y: Vec<f64> = vec![
+            85., 88., 88., 89., 97., 98., 99., 99., 102., 104., 109., 110., 113., 114., 115., 116.,
+        ];
-        let y_hat = RandomForestRegressor::fit(&x, &y,
-            RandomForestRegressorParameters{max_depth: None,
+        let y_hat = RandomForestRegressor::fit(
+            &x,
+            &y,
+            RandomForestRegressorParameters {
+                max_depth: None,
                 min_samples_leaf: 1,
                 min_samples_split: 2,
                 n_trees: 1000,
-                mtry: Option::None}).predict(&x);
+                mtry: Option::None,
+            },
+        )
+        .predict(&x);
         for i in 0..y_hat.len() {
             assert!((y_hat[i] - expected_y[i]).abs() < 1.0);
         }
     }
 
     #[test]
-    fn serde() {
+    fn serde() {
         let x = DenseMatrix::from_array(&[
-            &[ 234.289, 235.6, 159., 107.608, 1947., 60.323],
-            &[ 259.426, 232.5, 145.6, 108.632, 1948., 61.122],
-            &[ 258.054, 368.2, 161.6, 109.773, 1949., 60.171],
-            &[ 284.599, 335.1, 165., 110.929, 1950., 61.187],
-            &[ 328.975, 209.9, 309.9, 112.075, 1951., 63.221],
-            &[ 346.999, 193.2, 359.4, 113.27 , 1952., 63.639],
-            &[ 365.385, 187., 354.7, 115.094, 1953., 64.989],
-            &[ 363.112, 357.8, 335., 116.219, 1954., 63.761],
-            &[ 397.469, 290.4, 304.8, 117.388, 1955., 66.019],
-            &[ 419.18 , 282.2, 285.7, 118.734, 1956., 67.857],
-            &[ 442.769, 293.6, 279.8, 120.445, 1957., 68.169],
-            &[ 444.546, 468.1, 263.7, 121.95 , 1958., 66.513],
-            &[ 482.704, 381.3, 255.2, 123.366, 1959., 68.655],
-            &[ 502.601, 393.1, 251.4, 125.368, 1960., 69.564],
-            &[ 518.173, 480.6, 257.2, 127.852, 1961., 69.331],
-            &[ 554.894, 400.7, 282.7, 130.081, 1962., 70.551]]);
-        let y = vec![83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9];
+            &[234.289, 235.6, 159., 107.608, 1947., 60.323],
+            &[259.426, 232.5, 145.6, 108.632, 1948., 61.122],
+            &[258.054, 368.2, 161.6, 109.773, 1949., 60.171],
+            &[284.599, 335.1, 165., 110.929, 1950., 61.187],
+            &[328.975, 209.9, 309.9, 112.075, 1951., 63.221],
+            &[346.999, 193.2, 359.4, 113.27, 1952., 63.639],
+            &[365.385, 187., 354.7, 115.094, 1953., 64.989],
+            &[363.112, 357.8, 335., 116.219, 1954., 63.761],
+            &[397.469, 290.4, 304.8, 117.388, 1955., 66.019],
+            &[419.18, 282.2, 285.7, 118.734, 1956., 67.857],
+            &[442.769, 293.6, 279.8, 120.445, 1957., 68.169],
+            &[444.546, 468.1, 263.7, 121.95, 1958., 66.513],
+            &[482.704, 381.3, 255.2, 123.366, 1959., 68.655],
+            &[502.601, 393.1, 251.4, 125.368, 1960., 69.564],
+            &[518.173, 480.6, 257.2, 127.852, 1961., 69.331],
+            &[554.894, 400.7, 282.7, 130.081, 1962., 70.551],
+        ]);
+        let y = vec![
+            83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6,
+            114.2, 115.7, 116.9,
+        ];
         let forest = RandomForestRegressor::fit(&x, &y, Default::default());
-        let deserialized_forest: RandomForestRegressor<f64> = bincode::deserialize(&bincode::serialize(&forest).unwrap()).unwrap();
+        let deserialized_forest: RandomForestRegressor<f64> =
+            bincode::deserialize(&bincode::serialize(&forest).unwrap()).unwrap();
-        assert_eq!(forest, deserialized_forest);
+        assert_eq!(forest, deserialized_forest);
-    }
+    }
 }
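
The regressor mirrors the classifier but differs in its bootstrap and aggregation: sample_with_replacement draws nrows row indices uniformly with replacement and records how often each row was drawn, and predict_for_row averages the trees' outputs instead of taking a majority vote. Below is a standalone restatement of that sampler, with the same logic as in the diff; the two-argument gen_range is the rand 0.7-era API this code compiles against.

use rand::Rng;

// Draw `nrows` row indices uniformly with replacement and return, for each
// row, the number of times it was drawn. Rows whose count stays at zero are
// the out-of-bag samples for that tree.
fn sample_with_replacement(nrows: usize) -> Vec<usize> {
    let mut rng = rand::thread_rng();
    let mut samples = vec![0; nrows];
    for _ in 0..nrows {
        let xi: usize = rng.gen_range(0, nrows); // newer rand takes a single range argument
        samples[xi] += 1;
    }
    samples
}

fn main() {
    let counts = sample_with_replacement(16);
    // The counts always sum to nrows; in expectation about 1/e (~37%) of the
    // rows are never drawn for a given tree.
    assert_eq!(counts.iter().sum::<usize>(), 16);
    println!("{:?}", counts);
}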