diff --git a/src/naive_bayes/bernoulli.rs b/src/naive_bayes/bernoulli.rs
index 02bf330..27731b2 100644
--- a/src/naive_bayes/bernoulli.rs
+++ b/src/naive_bayes/bernoulli.rs
@@ -364,6 +364,20 @@ pub struct BernoulliNB<
     binarize: Option<TX>,
 }
 
+impl<TX: Number + PartialOrd, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
+    fmt::Display for BernoulliNB<TX, TY, X, Y>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(
+            f,
+            "BernoulliNB:\ninner: {:?}\nbinarize: {:?}",
+            self.inner.as_ref().unwrap(),
+            self.binarize.as_ref().unwrap()
+        )?;
+        Ok(())
+    }
+}
+
 impl<TX: Number + PartialOrd, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
     SupervisedEstimator<X, Y, BernoulliNBParameters<TX>> for BernoulliNB<TX, TY, X, Y>
 {
@@ -594,6 +608,9 @@ mod tests {
             ]
         );
 
+        // test Display
+        println!("{}", &bnb);
+
         let distribution = bnb.inner.clone().unwrap().distribution;
 
         assert_eq!(
diff --git a/src/naive_bayes/categorical.rs b/src/naive_bayes/categorical.rs
index f2ae4a8..970f799 100644
--- a/src/naive_bayes/categorical.rs
+++ b/src/naive_bayes/categorical.rs
@@ -139,6 +139,17 @@ impl<T: Number + Unsigned> NBDistribution<T, T> for CategoricalNBDistribution<T>
     }
 }
 
+impl<T: Number + Unsigned, X: Array2<T>, Y: Array1<T>> fmt::Display for CategoricalNB<T, X, Y> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(
+            f,
+            "CategoricalNB:\ninner: {:?}",
+            self.inner.as_ref().unwrap()
+        )?;
+        Ok(())
+    }
+}
+
 impl<T: Number + Unsigned> CategoricalNBDistribution<T> {
     /// Fits the distribution to a NxM matrix where N is number of samples and M is number of features.
     /// * `x` - training data.
@@ -539,6 +550,8 @@ mod tests {
         let cnb = CategoricalNB::fit(&x, &y, Default::default()).unwrap();
         let y_hat = cnb.predict(&x).unwrap();
         assert_eq!(y_hat, vec![0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1]);
+
+        println!("{}", &cnb);
     }
 
     #[cfg_attr(
diff --git a/src/naive_bayes/gaussian.rs b/src/naive_bayes/gaussian.rs
index f23ffdb..a9c1d4f 100644
--- a/src/naive_bayes/gaussian.rs
+++ b/src/naive_bayes/gaussian.rs
@@ -271,6 +271,19 @@ pub struct GaussianNB<
     inner: Option<BaseNaiveBayes<TX, TY, X, Y, GaussianNBDistribution<TX, TY>>>,
 }
 
+impl<
+        TX: Number + RealNumber + RealNumber,
+        TY: Number + Ord + Unsigned,
+        X: Array2<TX>,
+        Y: Array1<TY>,
+    > fmt::Display for GaussianNB<TX, TY, X, Y>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(f, "GaussianNB:\ninner: {:?}", self.inner.as_ref().unwrap())?;
+        Ok(())
+    }
+}
+
 impl<
         TX: Number + RealNumber + RealNumber,
         TY: Number + Ord + Unsigned,
@@ -433,6 +446,9 @@ mod tests {
         let gnb = GaussianNB::fit(&x, &y, parameters).unwrap();
 
         assert_eq!(gnb.class_priors(), &priors);
+
+        // test display for GNB
+        println!("{}", &gnb);
     }
 
     #[cfg_attr(
diff --git a/src/naive_bayes/multinomial.rs b/src/naive_bayes/multinomial.rs
index f3305ac..4191106 100644
--- a/src/naive_bayes/multinomial.rs
+++ b/src/naive_bayes/multinomial.rs
@@ -309,6 +309,19 @@ pub struct MultinomialNB<
     inner: Option<BaseNaiveBayes<TX, TY, X, Y, MultinomialNBDistribution<TX, TY>>>,
 }
 
+impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>> fmt::Display
+    for MultinomialNB<TX, TY, X, Y>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(
+            f,
+            "MultinomialNB:\ninner: {:?}",
+            self.inner.as_ref().unwrap()
+        )?;
+        Ok(())
+    }
+}
+
 impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
     SupervisedEstimator<X, Y, MultinomialNBParameters> for MultinomialNB<TX, TY, X, Y>
 {
@@ -500,6 +513,9 @@ mod tests {
             ]
         );
 
+        // test display
+        println!("{}", &nb);
+
         let y_hat = nb.predict(&x).unwrap();
 
         let distribution = nb.inner.clone().unwrap().distribution;