Fix rust-2018-idioms warnings

Luis Moreno
2020-11-08 20:24:08 -04:00
parent ea5de9758a
commit 54886ebd72
15 changed files with 29 additions and 38 deletions
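The warnings fixed here come from the `rust_2018_idioms` lint group: mainly redundant `extern crate` declarations and lifetimes silently elided out of type paths. As a sketch (the crate root is not part of this diff, so the exact placement is an assumption), the group is typically enabled like this:

```rust
// Crate root (e.g. src/lib.rs) — placement assumed, not shown in this commit.
// Warns on 2015-edition leftovers such as `extern crate`, and on lifetimes
// hidden by elision in type paths (the elided_lifetimes_in_paths lint).
#![warn(rust_2018_idioms)]
```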
-2
@@ -29,8 +29,6 @@
//! * ["A Density-Based Algorithm for Discovering Clusters in Large Spatial Databases with Noise", Ester M., Kriegel HP., Sander J., Xu X.](http://faculty.marshall.usc.edu/gareth-james/ISL/)
//! * ["Density-Based Clustering in Spatial Databases: The Algorithm GDBSCAN and its Applications", Sander J., Ester M., Kriegel HP., Xu X.](https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.63.1629&rep=rep1&type=pdf)
extern crate rand;
use std::fmt::Debug;
use std::iter::Sum;
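The deletions in these module headers are all the same idiom: under the 2018 edition, a dependency declared in Cargo.toml is in scope without an `extern crate` item. A minimal standalone sketch (assuming a rand 0.7-era API):

```rust
// extern crate rand;   // required in Rust 2015, redundant in Rust 2018
use rand::Rng;

fn main() {
    // `rand` resolves through Cargo.toml alone; no declaration needed.
    let x: f64 = rand::thread_rng().gen();
    println!("sampled {}", x);
}
```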
-2
@@ -52,8 +52,6 @@
//! * ["An Introduction to Statistical Learning", James G., Witten D., Hastie T., Tibshirani R., 10.3.1 K-Means Clustering](http://faculty.marshall.usc.edu/gareth-james/ISL/)
//! * ["k-means++: The Advantages of Careful Seeding", Arthur D., Vassilvitskii S.](http://ilpubs.stanford.edu:8090/778/1/2006-13.pdf)
extern crate rand;
use rand::Rng;
use std::fmt::Debug;
use std::iter::Sum;
-2
@@ -45,8 +45,6 @@
 //!
 //! <script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
 //! <script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
-extern crate rand;
 use std::default::Default;
 use std::fmt::Debug;
-1
@@ -42,7 +42,6 @@
 //!
 //! <script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
 //! <script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
-extern crate rand;
 use std::default::Default;
 use std::fmt::Debug;
+2 -2
@@ -82,7 +82,7 @@ impl PartialEq for Failed {
 }
 impl fmt::Display for FailedError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let failed_err_str = match self {
             FailedError::FitFailed => "Fit failed",
             FailedError::PredictFailed => "Predict failed",
@@ -96,7 +96,7 @@ impl fmt::Display for FailedError {
 }
 impl fmt::Display for Failed {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}: {}", self.err, self.msg)
     }
 }
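These two hunks fix `elided_lifetimes_in_paths`: `fmt::Formatter` carries a lifetime parameter, and the lint asks that the elision be made visible as `Formatter<'_>`. A self-contained sketch of the same pattern (`Wrapper` is an illustrative type, not part of this crate):

```rust
use std::fmt;

struct Wrapper(String);

impl fmt::Display for Wrapper {
    // `<'_>` changes nothing at runtime; it only spells out the lifetime
    // that was being elided, which is what the lint group requires.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
```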
+1 -1
@@ -515,7 +515,7 @@ pub trait Matrix<T: RealNumber>:
 {
 }
-pub(crate) fn row_iter<F: RealNumber, M: BaseMatrix<F>>(m: &M) -> RowIter<F, M> {
+pub(crate) fn row_iter<F: RealNumber, M: BaseMatrix<F>>(m: &M) -> RowIter<'_, F, M> {
     RowIter {
         m,
         pos: 0,
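Here the lint fires in return position: `RowIter` borrows from `m`, and `RowIter<'_, F, M>` surfaces that borrow in the signature. An analogous standalone example (`SliceIter` and `iter_over` are hypothetical names, not SmartCore API):

```rust
// The returned iterator borrows `data`; `'_` makes that borrow visible
// in the function signature instead of hiding it behind elision.
struct SliceIter<'a, T> {
    data: &'a [T],
    pos: usize,
}

impl<'a, T: Copy> Iterator for SliceIter<'a, T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        let item = self.data.get(self.pos).copied();
        self.pos += 1;
        item
    }
}

fn iter_over<T: Copy>(data: &[T]) -> SliceIter<'_, T> {
    SliceIter { data, pos: 0 }
}
```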
+2 -3
@@ -1,4 +1,3 @@
-extern crate num;
 use std::fmt;
 use std::fmt::Debug;
 use std::marker::PhantomData;
@@ -197,7 +196,7 @@ pub struct DenseMatrixIterator<'a, T: RealNumber> {
 }
 impl<T: RealNumber> fmt::Display for DenseMatrix<T> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut rows: Vec<Vec<f64>> = Vec::new();
         for r in 0..self.nrows {
             rows.push(
@@ -356,7 +355,7 @@ impl<'de, T: RealNumber + fmt::Debug + Deserialize<'de>> Deserialize<'de> for De
 impl<'a, T: RealNumber + fmt::Debug + Deserialize<'a>> Visitor<'a> for DenseMatrixVisitor<T> {
     type Value = DenseMatrix<T>;
-    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
         formatter.write_str("struct DenseMatrix")
     }
-1
@@ -8,7 +8,6 @@
 //! your data.
 //!
 //! In SmartCore you can split your data into training and test datasets using `train_test_split` function.
-extern crate rand;
 use crate::linalg::BaseVector;
 use crate::linalg::Matrix;
@@ -25,8 +25,8 @@ impl<T: RealNumber> Default for GradientDescent<T> {
 impl<T: RealNumber> FirstOrderOptimizer<T> for GradientDescent<T> {
     fn optimize<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
         &self,
-        f: &'a F<T, X>,
-        df: &'a DF<X>,
+        f: &'a F<'_, T, X>,
+        df: &'a DF<'_, X>,
         x0: &X,
         ls: &'a LS,
     ) -> OptimizerResult<T, X> {
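`F` and `DF` are the crate's type aliases for objective and gradient functions; since they carry a lifetime parameter, the lint wants `F<'_, T, X>` rather than the bare path. A rough analogue (the alias below is hypothetical; the real definitions of `F`/`DF` are not shown in this diff):

```rust
// Hypothetical alias with the same shape as F/DF: a dyn-Fn callable
// whose captured borrows are bounded by 'a.
type Objective<'a> = dyn Fn(&[f64]) -> f64 + 'a;

// `&Objective` would compile but hide the lifetime; `&Objective<'_>`
// satisfies elided_lifetimes_in_paths.
fn eval_at_zero(f: &Objective<'_>, n: usize) -> f64 {
    f(&vec![0.0; n])
}

fn main() {
    let quadratic = |x: &[f64]| x.iter().map(|v| v * v).sum::<f64>();
    println!("{}", eval_at_zero(&quadratic, 3)); // prints 0
}
```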
+5 -5
@@ -100,8 +100,8 @@ impl<T: RealNumber> LBFGS<T> {
     fn update_state<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
         &self,
-        f: &'a F<T, X>,
-        df: &'a DF<X>,
+        f: &'a F<'_, T, X>,
+        df: &'a DF<'_, X>,
         ls: &'a LS,
         state: &mut LBFGSState<T, X>,
     ) {
@@ -162,7 +162,7 @@ impl<T: RealNumber> LBFGS<T> {
         g_converged || x_converged || state.counter_f_tol > self.successive_f_tol
     }
-    fn update_hessian<'a, X: Matrix<T>>(&self, _: &'a DF<X>, state: &mut LBFGSState<T, X>) {
+    fn update_hessian<'a, X: Matrix<T>>(&self, _: &'a DF<'_, X>, state: &mut LBFGSState<T, X>) {
         state.dg = state.x_df.sub(&state.x_df_prev);
         let rho_iteration = T::one() / state.dx.dot(&state.dg);
         if !rho_iteration.is_infinite() {
@@ -198,8 +198,8 @@ struct LBFGSState<T: RealNumber, X: Matrix<T>> {
 impl<T: RealNumber> FirstOrderOptimizer<T> for LBFGS<T> {
     fn optimize<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
         &self,
-        f: &F<T, X>,
-        df: &'a DF<X>,
+        f: &F<'_, T, X>,
+        df: &'a DF<'_, X>,
         x0: &X,
         ls: &'a LS,
     ) -> OptimizerResult<T, X> {
+2 -2
@@ -12,8 +12,8 @@ use crate::optimization::{DF, F};
 pub trait FirstOrderOptimizer<T: RealNumber> {
     fn optimize<'a, X: Matrix<T>, LS: LineSearchMethod<T>>(
         &self,
-        f: &F<T, X>,
-        df: &'a DF<X>,
+        f: &F<'_, T, X>,
+        df: &'a DF<'_, X>,
         x0: &X,
         ls: &'a LS,
     ) -> OptimizerResult<T, X>;
+8 -8
@@ -378,7 +378,7 @@ impl<'a, T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Optimizer<'a,
         (support_vectors, w, b)
     }
-    fn initialize(&mut self, cache: &mut Cache<T, M, K>) {
+    fn initialize(&mut self, cache: &mut Cache<'_, T, M, K>) {
         let (n, _) = self.x.shape();
         let few = 5;
         let mut cp = 0;
@@ -402,7 +402,7 @@ impl<'a, T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Optimizer<'a,
         }
     }
-    fn process(&mut self, i: usize, x: M::RowVector, y: T, cache: &mut Cache<T, M, K>) -> bool {
+    fn process(&mut self, i: usize, x: M::RowVector, y: T, cache: &mut Cache<'_, T, M, K>) -> bool {
         for j in 0..self.sv.len() {
             if self.sv[j].index == i {
                 return true;
@@ -445,13 +445,13 @@ impl<'a, T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Optimizer<'a,
         true
     }
-    fn reprocess(&mut self, tol: T, cache: &mut Cache<T, M, K>) -> bool {
+    fn reprocess(&mut self, tol: T, cache: &mut Cache<'_, T, M, K>) -> bool {
         let status = self.smo(None, None, tol, cache);
         self.clean(cache);
         status
     }
-    fn finish(&mut self, cache: &mut Cache<T, M, K>) {
+    fn finish(&mut self, cache: &mut Cache<'_, T, M, K>) {
         let mut max_iter = self.sv.len();
         while self.smo(None, None, self.parameters.tol, cache) && max_iter > 0 {
@@ -486,7 +486,7 @@ impl<'a, T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Optimizer<'a,
         self.recalculate_minmax_grad = false
     }
-    fn clean(&mut self, cache: &mut Cache<T, M, K>) {
+    fn clean(&mut self, cache: &mut Cache<'_, T, M, K>) {
         self.find_min_max_gradient();
         let gmax = self.gmax;
@@ -520,7 +520,7 @@ impl<'a, T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Optimizer<'a,
         &mut self,
         idx_1: Option<usize>,
         idx_2: Option<usize>,
-        cache: &mut Cache<T, M, K>,
+        cache: &mut Cache<'_, T, M, K>,
     ) -> Option<(usize, usize, T)> {
         match (idx_1, idx_2) {
             (None, None) => {
@@ -614,7 +614,7 @@ impl<'a, T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Optimizer<'a,
         idx_1: Option<usize>,
         idx_2: Option<usize>,
         tol: T,
-        cache: &mut Cache<T, M, K>,
+        cache: &mut Cache<'_, T, M, K>,
     ) -> bool {
         match self.select_pair(idx_1, idx_2, cache) {
             Some((idx_1, idx_2, k_v_12)) => {
@@ -653,7 +653,7 @@ impl<'a, T: RealNumber, M: Matrix<T>, K: Kernel<T, M::RowVector>> Optimizer<'a,
         }
     }
-    fn update(&mut self, v1: usize, v2: usize, step: T, cache: &mut Cache<T, M, K>) {
+    fn update(&mut self, v1: usize, v2: usize, step: T, cache: &mut Cache<'_, T, M, K>) {
         self.sv[v1].alpha -= step;
         self.sv[v2].alpha += step;
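All eight changes in this file are the same fix: the SMO optimizer's `Cache` borrows data, so every `&mut Cache<T, M, K>` parameter gains a `'_` for the elided lifetime. Reduced to essentials (all names below are illustrative, not the real SVM types):

```rust
// A borrowing cache passed into methods, as in the optimizer above.
struct Cache<'a> {
    rows: &'a [Vec<f64>],
}

struct Optimizer {
    steps: usize,
}

impl Optimizer {
    // `Cache<'_>` acknowledges the reference held inside the struct;
    // the bare path `&mut Cache` compiles but warns under rust_2018_idioms.
    fn process(&mut self, i: usize, cache: &mut Cache<'_>) -> f64 {
        self.steps += 1;
        cache.rows[i].iter().sum()
    }
}
```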
+1 -1
@@ -469,7 +469,7 @@ impl<T: Clone> Cache<T> {
         }
     }
-    fn get<F: Fn() -> Vec<T>>(&self, i: usize, or: F) -> Ref<Vec<T>> {
+    fn get<F: Fn() -> Vec<T>>(&self, i: usize, or: F) -> Ref<'_, Vec<T>> {
         if self.data[i].borrow().is_none() {
             self.data[i].replace(Some(or()));
         }
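`Ref` is the guard type of `RefCell`, so it too has a lifetime parameter tied to the borrowed cell. A minimal sketch of the same lazy-initialization pattern (a single slot instead of the indexed cache above; `Lazy` is a made-up name):

```rust
use std::cell::{Ref, RefCell};

struct Lazy<T> {
    slot: RefCell<Option<T>>,
}

impl<T> Lazy<T> {
    // `Ref<'_, T>` shows the guard borrows from `self.slot`.
    fn get<F: FnOnce() -> T>(&self, or: F) -> Ref<'_, T> {
        if self.slot.borrow().is_none() {
            // Fill the slot on first access; the condition's temporary
            // borrow is already dropped here, so replace() is safe.
            self.slot.replace(Some(or()));
        }
        Ref::map(self.slot.borrow(), |v| v.as_ref().unwrap())
    }
}
```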
+3 -3
@@ -334,7 +334,7 @@ impl<T: RealNumber> DecisionTreeClassifier<T> {
         let mut visitor = NodeVisitor::<T, M>::new(0, samples, &order, &x, &yi, 1);
-        let mut visitor_queue: LinkedList<NodeVisitor<T, M>> = LinkedList::new();
+        let mut visitor_queue: LinkedList<NodeVisitor<'_, T, M>> = LinkedList::new();
         if tree.find_best_cutoff(&mut visitor, mtry) {
             visitor_queue.push_back(visitor);
@@ -392,7 +392,7 @@ impl<T: RealNumber> DecisionTreeClassifier<T> {
     fn find_best_cutoff<M: Matrix<T>>(
         &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
         mtry: usize,
     ) -> bool {
         let (n_rows, n_attr) = visitor.x.shape();
@@ -455,7 +455,7 @@ impl<T: RealNumber> DecisionTreeClassifier<T> {
     fn find_best_split<M: Matrix<T>>(
         &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
         n: usize,
         count: &Vec<usize>,
         false_count: &mut Vec<usize>,
+3 -3
@@ -240,7 +240,7 @@ impl<T: RealNumber> DecisionTreeRegressor<T> {
         let mut visitor = NodeVisitor::<T, M>::new(0, samples, &order, &x, &y_m, 1);
-        let mut visitor_queue: LinkedList<NodeVisitor<T, M>> = LinkedList::new();
+        let mut visitor_queue: LinkedList<NodeVisitor<'_, T, M>> = LinkedList::new();
         if tree.find_best_cutoff(&mut visitor, mtry) {
             visitor_queue.push_back(visitor);
@@ -298,7 +298,7 @@ impl<T: RealNumber> DecisionTreeRegressor<T> {
     fn find_best_cutoff<M: Matrix<T>>(
         &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
         mtry: usize,
     ) -> bool {
         let (_, n_attr) = visitor.x.shape();
@@ -332,7 +332,7 @@ impl<T: RealNumber> DecisionTreeRegressor<T> {
     fn find_best_split<M: Matrix<T>>(
         &mut self,
-        visitor: &mut NodeVisitor<T, M>,
+        visitor: &mut NodeVisitor<'_, T, M>,
         n: usize,
         sum: T,
         parent_gain: T,
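The classifier and regressor share this last pattern: `NodeVisitor` borrows the training data, so local annotations such as `LinkedList<NodeVisitor<T, M>>` need a `'_` as well. A compact analogue (`Visitor` and `breadth_first` are illustrative, not the tree module's API):

```rust
use std::collections::LinkedList;

// Stand-in for NodeVisitor: it borrows the samples it walks over.
struct Visitor<'a> {
    samples: &'a [usize],
    depth: usize,
}

fn breadth_first(samples: &[usize]) {
    // `'_` lets the compiler infer the borrow in the annotation; the bare
    // `Visitor` path is exactly what rust_2018_idioms warns about.
    let mut queue: LinkedList<Visitor<'_>> = LinkedList::new();
    queue.push_back(Visitor { samples, depth: 1 });
    while let Some(v) = queue.pop_front() {
        // Stub body: a real tree builder would split the node here.
        let _ = (v.samples.len(), v.depth);
    }
}
```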