File tree Expand file tree Collapse file tree 3 files changed +40
-0
lines changed Expand file tree Collapse file tree 3 files changed +40
-0
lines changed Original file line number Diff line number Diff line change
1
+ mod mse_loss;
2
+
3
+ pub use self :: mse_loss:: mse_loss;
Original file line number Diff line number Diff line change
1
/// # Mean Squared Error Loss Function
///
/// Computes the Mean Squared Error (MSE) between predicted values and
/// actual (ground-truth) values — a standard loss function for
/// regression problems in machine learning.
///
/// ## Formula
///
/// `loss = Σ (predicted[i] - actual[i])^2 / n`
///
/// where `n` is the number of predicted values.
///
/// # Arguments
///
/// * `predicted` - model outputs. Accepts any slice; `&Vec<f64>`
///   callers keep working via deref coercion.
/// * `actual` - ground-truth values. Only the first
///   `min(predicted.len(), actual.len())` pairs are compared
///   (standard `zip` semantics).
///
/// # Returns
///
/// The mean of the squared differences, or `0.0` for an empty
/// `predicted` slice (avoids `0.0 / 0.0 == NaN`).
pub fn mse_loss(predicted: &[f64], actual: &[f64]) -> f64 {
    // Guard the empty case explicitly: dividing by a zero length
    // would otherwise produce NaN rather than a usable loss value.
    if predicted.is_empty() {
        return 0.0;
    }
    let total_loss: f64 = predicted
        .iter()
        .zip(actual.iter())
        .map(|(p, a)| (p - a).powi(2))
        .sum();
    total_loss / predicted.len() as f64
}
24
+
25
#[cfg(test)]
mod tests {
    use super::*;

    // Hand-computed: squared diffs are 0.0, 1.0, 0.25, 0.25;
    // their sum (1.5) divided by 4 elements gives 0.375.
    #[test]
    fn test_mse_loss() {
        let predictions = [1.0, 2.0, 3.0, 4.0];
        let ground_truth = [1.0, 3.0, 3.5, 4.5];
        assert_eq!(mse_loss(&predictions.to_vec(), &ground_truth), 0.375);
    }
}
Original file line number Diff line number Diff line change 1
1
mod linear_regression;
2
+ mod loss_function;
2
3
mod optimization;
3
4
4
5
pub use self :: linear_regression:: linear_regression;
6
+ pub use self :: loss_function:: mse_loss;
5
7
pub use self :: optimization:: gradient_descent;
You can’t perform that action at this time.
0 commit comments