Implement Display for NaiveBayes
This commit is contained in:
@@ -364,6 +364,20 @@ pub struct BernoulliNB<
|
||||
binarize: Option<TX>,
|
||||
}
|
||||
|
||||
impl<TX: Number + PartialOrd, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
|
||||
fmt::Display for BernoulliNB<TX, TY, X, Y>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(
|
||||
f,
|
||||
"BernoulliNB:\ninner: {:?}\nbinarize: {:?}",
|
||||
self.inner.as_ref().unwrap(),
|
||||
self.binarize.as_ref().unwrap()
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<TX: Number + PartialOrd, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
|
||||
SupervisedEstimator<X, Y, BernoulliNBParameters<TX>> for BernoulliNB<TX, TY, X, Y>
|
||||
{
|
||||
@@ -594,6 +608,9 @@ mod tests {
|
||||
]
|
||||
);
|
||||
|
||||
// test Display
|
||||
println!("{}", &bnb);
|
||||
|
||||
let distribution = bnb.inner.clone().unwrap().distribution;
|
||||
|
||||
assert_eq!(
|
||||
|
||||
@@ -139,6 +139,17 @@ impl<T: Number + Unsigned> NBDistribution<T, T> for CategoricalNBDistribution<T>
|
||||
}
|
||||
}
|
||||
|
||||
/// Human-readable summary of a `CategoricalNB` model.
///
/// Shows the fitted inner distribution via its `Debug` representation.
/// Never panics: an unfitted model (`inner == None`) is reported
/// explicitly instead of unwrapping.
impl<T: Number + Unsigned, X: Array2<T>, Y: Array1<T>> fmt::Display for CategoricalNB<T, X, Y> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `inner` is only populated after `fit`; don't panic on display.
        match self.inner.as_ref() {
            Some(inner) => writeln!(f, "CategoricalNB:\ninner: {:?}", inner),
            None => writeln!(f, "CategoricalNB:\ninner: None (model not fitted)"),
        }
    }
}
|
||||
|
||||
impl<T: Number + Unsigned> CategoricalNBDistribution<T> {
|
||||
/// Fits the distribution to a NxM matrix where N is number of samples and M is number of features.
|
||||
/// * `x` - training data.
|
||||
@@ -539,6 +550,8 @@ mod tests {
|
||||
let cnb = CategoricalNB::fit(&x, &y, Default::default()).unwrap();
|
||||
let y_hat = cnb.predict(&x).unwrap();
|
||||
assert_eq!(y_hat, vec![0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1]);
|
||||
|
||||
println!("{}", &cnb);
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
|
||||
@@ -271,6 +271,19 @@ pub struct GaussianNB<
|
||||
inner: Option<BaseNaiveBayes<TX, TY, X, Y, GaussianNBDistribution<TY>>>,
|
||||
}
|
||||
|
||||
impl<
|
||||
TX: Number + RealNumber + RealNumber,
|
||||
TY: Number + Ord + Unsigned,
|
||||
X: Array2<TX>,
|
||||
Y: Array1<TY>,
|
||||
> fmt::Display for GaussianNB<TX, TY, X, Y>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(f, "GaussianNB:\ninner: {:?}", self.inner.as_ref().unwrap())?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<
|
||||
TX: Number + RealNumber + RealNumber,
|
||||
TY: Number + Ord + Unsigned,
|
||||
@@ -433,6 +446,9 @@ mod tests {
|
||||
let gnb = GaussianNB::fit(&x, &y, parameters).unwrap();
|
||||
|
||||
assert_eq!(gnb.class_priors(), &priors);
|
||||
|
||||
// test display for GNB
|
||||
println!("{}", &gnb);
|
||||
}
|
||||
|
||||
#[cfg_attr(
|
||||
|
||||
@@ -309,6 +309,19 @@ pub struct MultinomialNB<
|
||||
inner: Option<BaseNaiveBayes<TX, TY, X, Y, MultinomialNBDistribution<TY>>>,
|
||||
}
|
||||
|
||||
impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>> fmt::Display
|
||||
for MultinomialNB<TX, TY, X, Y>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(
|
||||
f,
|
||||
"MultinomialNB:\ninner: {:?}",
|
||||
self.inner.as_ref().unwrap()
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<TX: Number + Unsigned, TY: Number + Ord + Unsigned, X: Array2<TX>, Y: Array1<TY>>
|
||||
SupervisedEstimator<X, Y, MultinomialNBParameters> for MultinomialNB<TX, TY, X, Y>
|
||||
{
|
||||
@@ -500,6 +513,9 @@ mod tests {
|
||||
]
|
||||
);
|
||||
|
||||
// test display
|
||||
println!("{}", &nb);
|
||||
|
||||
let y_hat = nb.predict(&x).unwrap();
|
||||
|
||||
let distribution = nb.inner.clone().unwrap().distribution;
|
||||
|
||||
Reference in New Issue
Block a user