diff --git a/src/cluster/dbscan.rs b/src/cluster/dbscan.rs
index 787d8d3..e595028 100644
--- a/src/cluster/dbscan.rs
+++ b/src/cluster/dbscan.rs
@@ -29,8 +29,6 @@
//! * ["A Density-Based Algorithm for Discovering Clusters in Large Spatial Databases with Noise", Ester M., Kriegel HP., Sander J., Xu X.](http://faculty.marshall.usc.edu/gareth-james/ISL/)
//! * ["Density-Based Clustering in Spatial Databases: The Algorithm GDBSCAN and its Applications", Sander J., Ester M., Kriegel HP., Xu X.](https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.63.1629&rep=rep1&type=pdf)
-extern crate rand;
-
use std::fmt::Debug;
use std::iter::Sum;
diff --git a/src/cluster/kmeans.rs b/src/cluster/kmeans.rs
index 0da8a72..26a4038 100644
--- a/src/cluster/kmeans.rs
+++ b/src/cluster/kmeans.rs
@@ -52,8 +52,6 @@
//! * ["An Introduction to Statistical Learning", James G., Witten D., Hastie T., Tibshirani R., 10.3.1 K-Means Clustering](http://faculty.marshall.usc.edu/gareth-james/ISL/)
//! * ["k-means++: The Advantages of Careful Seeding", Arthur D., Vassilvitskii S.](http://ilpubs.stanford.edu:8090/778/1/2006-13.pdf)
-extern crate rand;
-
use rand::Rng;
use std::fmt::Debug;
use std::iter::Sum;
diff --git a/src/ensemble/random_forest_classifier.rs b/src/ensemble/random_forest_classifier.rs
index 0cfebf1..e1d462a 100644
--- a/src/ensemble/random_forest_classifier.rs
+++ b/src/ensemble/random_forest_classifier.rs
@@ -45,8 +45,6 @@
//!
//!
//!
-extern crate rand;
-
use std::default::Default;
use std::fmt::Debug;
diff --git a/src/ensemble/random_forest_regressor.rs b/src/ensemble/random_forest_regressor.rs
index c704a8f..36fa096 100644
--- a/src/ensemble/random_forest_regressor.rs
+++ b/src/ensemble/random_forest_regressor.rs
@@ -42,7 +42,6 @@
//!
//!
//!
-extern crate rand;
use std::default::Default;
use std::fmt::Debug;
diff --git a/src/error/mod.rs b/src/error/mod.rs
index 679f685..1615290 100644
--- a/src/error/mod.rs
+++ b/src/error/mod.rs
@@ -82,7 +82,7 @@ impl PartialEq for Failed {
}
impl fmt::Display for FailedError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let failed_err_str = match self {
FailedError::FitFailed => "Fit failed",
FailedError::PredictFailed => "Predict failed",
@@ -96,7 +96,7 @@ impl fmt::Display for FailedError {
}
impl fmt::Display for Failed {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}: {}", self.err, self.msg)
}
}
diff --git a/src/linalg/mod.rs b/src/linalg/mod.rs
index 09a9687..fc9d6c9 100644
--- a/src/linalg/mod.rs
+++ b/src/linalg/mod.rs
@@ -515,7 +515,7 @@ pub trait Matrix:
{
}
-pub(crate) fn row_iter<F: RealNumber, M: BaseMatrix<F>>(m: &M) -> RowIter<F, M> {
+pub(crate) fn row_iter<F: RealNumber, M: BaseMatrix<F>>(m: &M) -> RowIter<'_, F, M> {
RowIter {
m,
pos: 0,
diff --git a/src/linalg/naive/dense_matrix.rs b/src/linalg/naive/dense_matrix.rs
index c1ba650..aff0fa2 100644
--- a/src/linalg/naive/dense_matrix.rs
+++ b/src/linalg/naive/dense_matrix.rs
@@ -1,4 +1,3 @@
-extern crate num;
use std::fmt;
use std::fmt::Debug;
use std::marker::PhantomData;
@@ -197,7 +196,7 @@ pub struct DenseMatrixIterator<'a, T: RealNumber> {
}
impl<T: RealNumber> fmt::Display for DenseMatrix<T> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut rows: Vec> = Vec::new();
for r in 0..self.nrows {
rows.push(
@@ -356,7 +355,7 @@ impl<'de, T: RealNumber + fmt::Debug + Deserialize<'de>> Deserialize<'de> for De
impl<'a, T: RealNumber + fmt::Debug + Deserialize<'a>> Visitor<'a> for DenseMatrixVisitor<T> {
    type Value = DenseMatrix<T>;
- fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("struct DenseMatrix")
}
diff --git a/src/model_selection/mod.rs b/src/model_selection/mod.rs
index c53451d..d4908f6 100644
--- a/src/model_selection/mod.rs
+++ b/src/model_selection/mod.rs
@@ -8,7 +8,6 @@
//! your data.
//!
//! In SmartCore you can split your data into training and test datasets using `train_test_split` function.
-extern crate rand;
use crate::linalg::BaseVector;
use crate::linalg::Matrix;
diff --git a/src/optimization/first_order/gradient_descent.rs b/src/optimization/first_order/gradient_descent.rs
index 9cc78ec..d57896f 100644
--- a/src/optimization/first_order/gradient_descent.rs
+++ b/src/optimization/first_order/gradient_descent.rs
@@ -25,8 +25,8 @@ impl Default for GradientDescent {
impl<T: RealNumber> FirstOrderOptimizer<T> for GradientDescent<T> {
    fn optimize<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
&self,
-        f: &'a F<T, X>,
-        df: &'a DF<X>,
+ f: &'a F<'_, T, X>,
+ df: &'a DF<'_, X>,
x0: &X,
ls: &'a LS,
    ) -> OptimizerResult<T, X> {
diff --git a/src/optimization/first_order/lbfgs.rs b/src/optimization/first_order/lbfgs.rs
index b63f617..5dedfe6 100644
--- a/src/optimization/first_order/lbfgs.rs
+++ b/src/optimization/first_order/lbfgs.rs
@@ -100,8 +100,8 @@ impl LBFGS {
    fn update_state<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
&self,
-        f: &'a F<T, X>,
-        df: &'a DF<X>,
+ f: &'a F<'_, T, X>,
+ df: &'a DF<'_, X>,
ls: &'a LS,
        state: &mut LBFGSState<T, X>,
) {
@@ -162,7 +162,7 @@ impl LBFGS {
g_converged || x_converged || state.counter_f_tol > self.successive_f_tol
}
-    fn update_hessian<'a, X: Matrix<T>>(&self, _: &'a DF<X>, state: &mut LBFGSState<T, X>) {
+    fn update_hessian<'a, X: Matrix<T>>(&self, _: &'a DF<'_, X>, state: &mut LBFGSState<T, X>) {
state.dg = state.x_df.sub(&state.x_df_prev);
let rho_iteration = T::one() / state.dx.dot(&state.dg);
if !rho_iteration.is_infinite() {
@@ -198,8 +198,8 @@ struct LBFGSState> {
impl<T: RealNumber> FirstOrderOptimizer<T> for LBFGS<T> {
    fn optimize<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
&self,
-        f: &F<T, X>,
-        df: &'a DF<X>,
+ f: &F<'_, T, X>,
+ df: &'a DF<'_, X>,
x0: &X,
ls: &'a LS,
    ) -> OptimizerResult<T, X> {
diff --git a/src/optimization/first_order/mod.rs b/src/optimization/first_order/mod.rs
index d1c628f..f2e476f 100644
--- a/src/optimization/first_order/mod.rs
+++ b/src/optimization/first_order/mod.rs
@@ -12,8 +12,8 @@ use crate::optimization::{DF, F};
pub trait FirstOrderOptimizer<T: RealNumber> {
    fn optimize<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
&self,
-        f: &F<T, X>,
-        df: &'a DF<X>,
+ f: &F<'_, T, X>,
+ df: &'a DF<'_, X>,
x0: &X,
ls: &'a LS,
    ) -> OptimizerResult<T, X>;
diff --git a/src/svm/svc.rs b/src/svm/svc.rs
index bac6e4e..62a9e01 100644
--- a/src/svm/svc.rs
+++ b/src/svm/svc.rs
@@ -378,7 +378,7 @@ impl<'a, T: RealNumber, M: Matrix, K: Kernel> Optimizer<'a,
(support_vectors, w, b)
}
-    fn initialize(&mut self, cache: &mut Cache<T, M, K>) {
+ fn initialize(&mut self, cache: &mut Cache<'_, T, M, K>) {
let (n, _) = self.x.shape();
let few = 5;
let mut cp = 0;
@@ -402,7 +402,7 @@ impl<'a, T: RealNumber, M: Matrix, K: Kernel> Optimizer<'a,
}
}
-    fn process(&mut self, i: usize, x: M::RowVector, y: T, cache: &mut Cache<T, M, K>) -> bool {
+ fn process(&mut self, i: usize, x: M::RowVector, y: T, cache: &mut Cache<'_, T, M, K>) -> bool {
for j in 0..self.sv.len() {
if self.sv[j].index == i {
return true;
@@ -445,13 +445,13 @@ impl<'a, T: RealNumber, M: Matrix, K: Kernel> Optimizer<'a,
true
}
-    fn reprocess(&mut self, tol: T, cache: &mut Cache<T, M, K>) -> bool {
+ fn reprocess(&mut self, tol: T, cache: &mut Cache<'_, T, M, K>) -> bool {
let status = self.smo(None, None, tol, cache);
self.clean(cache);
status
}
-    fn finish(&mut self, cache: &mut Cache<T, M, K>) {
+ fn finish(&mut self, cache: &mut Cache<'_, T, M, K>) {
let mut max_iter = self.sv.len();
while self.smo(None, None, self.parameters.tol, cache) && max_iter > 0 {
@@ -486,7 +486,7 @@ impl<'a, T: RealNumber, M: Matrix, K: Kernel> Optimizer<'a,
self.recalculate_minmax_grad = false
}
-    fn clean(&mut self, cache: &mut Cache<T, M, K>) {
+ fn clean(&mut self, cache: &mut Cache<'_, T, M, K>) {
self.find_min_max_gradient();
let gmax = self.gmax;
@@ -520,7 +520,7 @@ impl<'a, T: RealNumber, M: Matrix, K: Kernel> Optimizer<'a,
&mut self,
idx_1: Option,
idx_2: Option,
-        cache: &mut Cache<T, M, K>,
+ cache: &mut Cache<'_, T, M, K>,
) -> Option<(usize, usize, T)> {
match (idx_1, idx_2) {
(None, None) => {
@@ -614,7 +614,7 @@ impl<'a, T: RealNumber, M: Matrix, K: Kernel> Optimizer<'a,
idx_1: Option,
idx_2: Option,
tol: T,
-        cache: &mut Cache<T, M, K>,
+ cache: &mut Cache<'_, T, M, K>,
) -> bool {
match self.select_pair(idx_1, idx_2, cache) {
Some((idx_1, idx_2, k_v_12)) => {
@@ -653,7 +653,7 @@ impl<'a, T: RealNumber, M: Matrix, K: Kernel> Optimizer<'a,
}
}
-    fn update(&mut self, v1: usize, v2: usize, step: T, cache: &mut Cache<T, M, K>) {
+ fn update(&mut self, v1: usize, v2: usize, step: T, cache: &mut Cache<'_, T, M, K>) {
self.sv[v1].alpha -= step;
self.sv[v2].alpha += step;
diff --git a/src/svm/svr.rs b/src/svm/svr.rs
index 36f308a..5d007d7 100644
--- a/src/svm/svr.rs
+++ b/src/svm/svr.rs
@@ -469,7 +469,7 @@ impl Cache {
}
}
-    fn get<F: Fn() -> Vec<T>>(&self, i: usize, or: F) -> Ref<Vec<T>> {
+    fn get<F: Fn() -> Vec<T>>(&self, i: usize, or: F) -> Ref<'_, Vec<T>> {
if self.data[i].borrow().is_none() {
self.data[i].replace(Some(or()));
}
diff --git a/src/tree/decision_tree_classifier.rs b/src/tree/decision_tree_classifier.rs
index b30fb2d..353c1bd 100644
--- a/src/tree/decision_tree_classifier.rs
+++ b/src/tree/decision_tree_classifier.rs
@@ -334,7 +334,7 @@ impl DecisionTreeClassifier {
        let mut visitor = NodeVisitor::<T, M>::new(0, samples, &order, &x, &yi, 1);
-        let mut visitor_queue: LinkedList<NodeVisitor<T, M>> = LinkedList::new();
+        let mut visitor_queue: LinkedList<NodeVisitor<'_, T, M>> = LinkedList::new();
if tree.find_best_cutoff(&mut visitor, mtry) {
visitor_queue.push_back(visitor);
@@ -392,7 +392,7 @@ impl DecisionTreeClassifier {
    fn find_best_cutoff<M: Matrix<T>>(
        &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
mtry: usize,
) -> bool {
let (n_rows, n_attr) = visitor.x.shape();
@@ -455,7 +455,7 @@ impl DecisionTreeClassifier {
    fn find_best_split<M: Matrix<T>>(
        &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
        n: usize,
        count: &Vec<usize>,
        false_count: &mut Vec<usize>,
diff --git a/src/tree/decision_tree_regressor.rs b/src/tree/decision_tree_regressor.rs
index 0d6da54..39f3eb8 100644
--- a/src/tree/decision_tree_regressor.rs
+++ b/src/tree/decision_tree_regressor.rs
@@ -240,7 +240,7 @@ impl DecisionTreeRegressor {
        let mut visitor = NodeVisitor::<T, M>::new(0, samples, &order, &x, &y_m, 1);
-        let mut visitor_queue: LinkedList<NodeVisitor<T, M>> = LinkedList::new();
+        let mut visitor_queue: LinkedList<NodeVisitor<'_, T, M>> = LinkedList::new();
if tree.find_best_cutoff(&mut visitor, mtry) {
visitor_queue.push_back(visitor);
@@ -298,7 +298,7 @@ impl DecisionTreeRegressor {
    fn find_best_cutoff<M: Matrix<T>>(
        &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
mtry: usize,
) -> bool {
let (_, n_attr) = visitor.x.shape();
@@ -332,7 +332,7 @@ impl DecisionTreeRegressor {
    fn find_best_split<M: Matrix<T>>(
        &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
n: usize,
sum: T,
parent_gain: T,