fix: formatting

This commit is contained in:
Volodymyr Orlov
2020-08-27 11:40:11 -07:00
parent aa458d22fa
commit f73b349f57
4 changed files with 59 additions and 40 deletions
+20 -20
View File
@@ -2,9 +2,9 @@
#![warn(missing_doc_code_examples)] #![warn(missing_doc_code_examples)]
//! # SmartCore //! # SmartCore
//! //!
//! Welcome to SmartCore library, the most complete machine learning library for Rust! //! Welcome to SmartCore library, the most complete machine learning library for Rust!
//! //!
//! In SmartCore you will find implementation of these ML algorithms: //! In SmartCore you will find implementation of these ML algorithms:
//! * Regression: Linear Regression (OLS), Decision Tree Regressor, Random Forest Regressor //! * Regression: Linear Regression (OLS), Decision Tree Regressor, Random Forest Regressor
//! * Classification: Logistic Regressor, Decision Tree Classifier, Random Forest Classifier, Unsupervised Nearest Neighbors (KNN) //! * Classification: Logistic Regressor, Decision Tree Classifier, Random Forest Classifier, Unsupervised Nearest Neighbors (KNN)
@@ -12,33 +12,33 @@
//! * Matrix decomposition: PCA, LU, QR, SVD, EVD //! * Matrix decomposition: PCA, LU, QR, SVD, EVD
//! * Distance Metrics: Euclidian, Minkowski, Manhattan, Hamming, Mahalanobis //! * Distance Metrics: Euclidian, Minkowski, Manhattan, Hamming, Mahalanobis
//! * Evaluation Metrics: Accuracy, AUC, Recall, Precision, F1, Mean Absolute Error, Mean Squared Error, R2 //! * Evaluation Metrics: Accuracy, AUC, Recall, Precision, F1, Mean Absolute Error, Mean Squared Error, R2
//! //!
//! Most of algorithms implemented in SmartCore operate on n-dimensional arrays. While you can use Rust vectors with all functions defined in this library //! Most of algorithms implemented in SmartCore operate on n-dimensional arrays. While you can use Rust vectors with all functions defined in this library
//! we do recommend to go with one of the popular linear algebra libraries available in Rust. At this moment we support these packages: //! we do recommend to go with one of the popular linear algebra libraries available in Rust. At this moment we support these packages:
//! * [ndarray](https://docs.rs/ndarray) //! * [ndarray](https://docs.rs/ndarray)
//! * [nalgebra](https://docs.rs/nalgebra/) //! * [nalgebra](https://docs.rs/nalgebra/)
//! //!
//! ## Getting Started //! ## Getting Started
//! //!
//! To start using SmartCore simply add the following to your Cargo.toml file: //! To start using SmartCore simply add the following to your Cargo.toml file:
//! ```ignore //! ```ignore
//! [dependencies] //! [dependencies]
//! smartcore = "0.1.0" //! smartcore = "0.1.0"
//! ``` //! ```
//! //!
//! All ML algorithms in SmartCore are grouped into these generic categories: //! All ML algorithms in SmartCore are grouped into these generic categories:
//! * [Clustering](cluster/index.html), unsupervised clustering of unlabeled data. //! * [Clustering](cluster/index.html), unsupervised clustering of unlabeled data.
//! * [Matrix Decomposition](decomposition/index.html), various methods for matrix decomposition. //! * [Matrix Decomposition](decomposition/index.html), various methods for matrix decomposition.
//! * [Linear Models](linear/index.html), regression and classification methods where output is assumed to have linear relation to explanatory variables //! * [Linear Models](linear/index.html), regression and classification methods where output is assumed to have linear relation to explanatory variables
//! * [Ensemble Models](ensemble/index.html), variety of regression and classification ensemble models //! * [Ensemble Models](ensemble/index.html), variety of regression and classification ensemble models
//! * [Tree-based Models](tree/index.html), classification and regression trees //! * [Tree-based Models](tree/index.html), classification and regression trees
//! * [Nearest Neighbors](neighbors/index.html), K Nearest Neighbors for classification and regression //! * [Nearest Neighbors](neighbors/index.html), K Nearest Neighbors for classification and regression
//! //!
//! Each category is assigned to a separate module. //! Each category is assigned to a separate module.
//! //!
//! For example, KNN classifier is defined in [smartcore::neighbors::knn](neighbors/knn/index.html). To train and run it using standard Rust vectors you will //! For example, KNN classifier is defined in [smartcore::neighbors::knn](neighbors/knn/index.html). To train and run it using standard Rust vectors you will
//! run this code: //! run this code:
//! //!
//! ``` //! ```
//! // DenseMatrix definition //! // DenseMatrix definition
//! use smartcore::linalg::naive::dense_matrix::*; //! use smartcore::linalg::naive::dense_matrix::*;
@@ -46,20 +46,20 @@
//! use smartcore::neighbors::knn::*; //! use smartcore::neighbors::knn::*;
//! // Various distance metrics //! // Various distance metrics
//! use smartcore::math::distance::*; //! use smartcore::math::distance::*;
//! //!
//! // Turn Rust vectors with samples into a matrix //! // Turn Rust vectors with samples into a matrix
//! let x = DenseMatrix::from_array(&[ //! let x = DenseMatrix::from_array(&[
//! &[1., 2.], //! &[1., 2.],
//! &[3., 4.], //! &[3., 4.],
//! &[5., 6.], //! &[5., 6.],
//! &[7., 8.], //! &[7., 8.],
//! &[9., 10.]]); //! &[9., 10.]]);
//! // Our classes are defined as a Vector //! // Our classes are defined as a Vector
//! let y = vec![2., 2., 2., 3., 3.]; //! let y = vec![2., 2., 2., 3., 3.];
//! //!
//! // Train classifier //! // Train classifier
//! let knn = KNNClassifier::fit(&x, &y, Distances::euclidian(), Default::default()); //! let knn = KNNClassifier::fit(&x, &y, Distances::euclidian(), Default::default());
//! //!
//! // Predict classes //! // Predict classes
//! let y_hat = knn.predict(&x); //! let y_hat = knn.predict(&x);
//! ``` //! ```
+24 -6
View File
@@ -12,7 +12,7 @@ pub enum LinearRegressionSolverName {
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub struct LinearRegressionParameters { pub struct LinearRegressionParameters {
solver: LinearRegressionSolverName, solver: LinearRegressionSolverName,
} }
@@ -25,8 +25,8 @@ pub struct LinearRegression<T: FloatExt, M: Matrix<T>> {
impl Default for LinearRegressionParameters { impl Default for LinearRegressionParameters {
fn default() -> Self { fn default() -> Self {
LinearRegressionParameters { LinearRegressionParameters {
solver: LinearRegressionSolverName::SVD solver: LinearRegressionSolverName::SVD,
} }
} }
} }
@@ -39,7 +39,11 @@ impl<T: FloatExt, M: Matrix<T>> PartialEq for LinearRegression<T, M> {
} }
impl<T: FloatExt, M: Matrix<T>> LinearRegression<T, M> { impl<T: FloatExt, M: Matrix<T>> LinearRegression<T, M> {
pub fn fit(x: &M, y: &M::RowVector, parameters: LinearRegressionParameters) -> LinearRegression<T, M> { pub fn fit(
x: &M,
y: &M::RowVector,
parameters: LinearRegressionParameters,
) -> LinearRegression<T, M> {
let y_m = M::from_row_vector(y.clone()); let y_m = M::from_row_vector(y.clone());
let b = y_m.transpose(); let b = y_m.transpose();
let (x_nrows, num_attributes) = x.shape(); let (x_nrows, num_attributes) = x.shape();
@@ -103,7 +107,14 @@ mod tests {
114.2, 115.7, 116.9, 114.2, 115.7, 116.9,
]); ]);
let y_hat_qr = LinearRegression::fit(&x, &y, LinearRegressionParameters{solver: LinearRegressionSolverName::QR}).predict(&x); let y_hat_qr = LinearRegression::fit(
&x,
&y,
LinearRegressionParameters {
solver: LinearRegressionSolverName::QR,
},
)
.predict(&x);
let y_hat_svd = LinearRegression::fit(&x, &y, Default::default()).predict(&x); let y_hat_svd = LinearRegression::fit(&x, &y, Default::default()).predict(&x);
@@ -143,7 +154,14 @@ mod tests {
114.2, 115.7, 116.9, 114.2, 115.7, 116.9,
]; ];
let y_hat_qr = LinearRegression::fit(&x, &y, LinearRegressionParameters{solver: LinearRegressionSolverName::QR}).predict(&x); let y_hat_qr = LinearRegression::fit(
&x,
&y,
LinearRegressionParameters {
solver: LinearRegressionSolverName::QR,
},
)
.predict(&x);
let y_hat_svd = LinearRegression::fit(&x, &y, Default::default()).predict(&x); let y_hat_svd = LinearRegression::fit(&x, &y, Default::default()).predict(&x);
+14 -13
View File
@@ -13,9 +13,9 @@ pub enum KNNAlgorithmName {
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub struct KNNClassifierParameters { pub struct KNNClassifierParameters {
pub algorithm: KNNAlgorithmName, pub algorithm: KNNAlgorithmName,
pub k: usize pub k: usize,
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
@@ -34,9 +34,9 @@ enum KNNAlgorithm<T: FloatExt, D: Distance<Vec<T>, T>> {
impl Default for KNNClassifierParameters { impl Default for KNNClassifierParameters {
fn default() -> Self { fn default() -> Self {
KNNClassifierParameters { KNNClassifierParameters {
algorithm: KNNAlgorithmName::CoverTree, algorithm: KNNAlgorithmName::CoverTree,
k: 3 k: 3,
} }
} }
} }
@@ -93,7 +93,7 @@ impl<T: FloatExt, D: Distance<Vec<T>, T>> KNNClassifier<T, D> {
x: &M, x: &M,
y: &M::RowVector, y: &M::RowVector,
distance: D, distance: D,
parameters: KNNClassifierParameters parameters: KNNClassifierParameters,
) -> KNNClassifier<T, D> { ) -> KNNClassifier<T, D> {
let y_m = M::from_row_vector(y.clone()); let y_m = M::from_row_vector(y.clone());
@@ -118,7 +118,10 @@ impl<T: FloatExt, D: Distance<Vec<T>, T>> KNNClassifier<T, D> {
) )
); );
assert!(parameters.k > 1, format!("k should be > 1, k=[{}]", parameters.k)); assert!(
parameters.k > 1,
format!("k should be > 1, k=[{}]", parameters.k)
);
KNNClassifier { KNNClassifier {
classes: classes, classes: classes,
@@ -169,7 +172,10 @@ mod tests {
&x, &x,
&y, &y,
Distances::euclidian(), Distances::euclidian(),
KNNClassifierParameters{k: 3, algorithm: KNNAlgorithmName::LinearSearch} KNNClassifierParameters {
k: 3,
algorithm: KNNAlgorithmName::LinearSearch,
},
); );
let r = knn.predict(&x); let r = knn.predict(&x);
assert_eq!(5, Vec::len(&r)); assert_eq!(5, Vec::len(&r));
@@ -181,12 +187,7 @@ mod tests {
let x = DenseMatrix::from_array(&[&[1., 2.], &[3., 4.], &[5., 6.], &[7., 8.], &[9., 10.]]); let x = DenseMatrix::from_array(&[&[1., 2.], &[3., 4.], &[5., 6.], &[7., 8.], &[9., 10.]]);
let y = vec![2., 2., 2., 3., 3.]; let y = vec![2., 2., 2., 3., 3.];
let knn = KNNClassifier::fit( let knn = KNNClassifier::fit(&x, &y, Distances::euclidian(), Default::default());
&x,
&y,
Distances::euclidian(),
Default::default()
);
let deserialized_knn = bincode::deserialize(&bincode::serialize(&knn).unwrap()).unwrap(); let deserialized_knn = bincode::deserialize(&bincode::serialize(&knn).unwrap()).unwrap();
+1 -1
View File
@@ -5,7 +5,7 @@ pub type F<'a, T, X> = dyn for<'b> Fn(&'b X) -> T + 'a;
pub type DF<'a, X> = dyn for<'b> Fn(&'b mut X, &'b X) + 'a; pub type DF<'a, X> = dyn for<'b> Fn(&'b mut X, &'b X) + 'a;
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub enum FunctionOrder { pub enum FunctionOrder {
SECOND, SECOND,
THIRD, THIRD,
} }