From 4426515012fe8b8ebcb7fc1162015648b287664c Mon Sep 17 00:00:00 2001
From: ykarni
Date: Mon, 2 Jan 2023 15:58:32 +0200
Subject: [PATCH] make cargo build happy

---
 src/block_leaky_relu.rs | 30 +++++++++---------------------
 src/block_relu.rs       |  2 +-
 src/block_sigmoid.rs    | 38 ++++++++++++++------------------------
 src/block_tanh.rs       | 31 ++++++++++---------------------
 src/regressor.rs        |  7 ++++---
 5 files changed, 38 insertions(+), 70 deletions(-)

diff --git a/src/block_leaky_relu.rs b/src/block_leaky_relu.rs
index 31117512..2790e8de 100644
--- a/src/block_leaky_relu.rs
+++ b/src/block_leaky_relu.rs
@@ -1,22 +1,13 @@
 use std::any::Any;
-use std::io;
-use merand48::*;
-use core::arch::x86_64::*;
 use std::error::Error;
-use std::mem::{self, MaybeUninit};
-
-use crate::optimizer;
 use crate::regressor;
 use crate::model_instance;
 use crate::feature_buffer;
 use crate::port_buffer;
-use crate::consts;
 use crate::block_helpers;
 use crate::graph;
-use optimizer::OptimizerTrait;
 use regressor::BlockTrait;
-use block_helpers::{Weight, WeightAndOptimizerData};
 
 use crate::graph::{BlockGraph};
 
@@ -29,12 +20,11 @@ pub struct BlockLeakyRELU {
 
 pub fn new_leaky_relu_block(bg: &mut BlockGraph,
-                            mi: &model_instance::ModelInstance,
                             input: graph::BlockPtrOutput,
 ) -> Result<graph::BlockPtrOutput, Box<dyn Error>> {
     let num_inputs = bg.get_num_output_values(vec![&input]);
     assert!(num_inputs != 0);
-    let mut block = Box::new(BlockLeakyRELU {
+    let block = Box::new(BlockLeakyRELU {
         output_offset: usize::MAX,
         input_offset: usize::MAX,
         num_inputs: num_inputs,
@@ -110,7 +100,7 @@ impl BlockTrait for BlockLeakyRELU
     }
 
-    fn allocate_and_init_weights(&mut self, mi: &model_instance::ModelInstance) {}
+    fn allocate_and_init_weights(&mut self, _mi: &model_instance::ModelInstance) {}
 
     fn get_num_output_values(&self, output: graph::OutputSlot) -> usize {
         assert!(output.get_output_index() == 0);
@@ -136,10 +126,8 @@ mod tests {
     use super::*;
     use crate::block_misc;
     use crate::feature_buffer;
-    use crate::feature_buffer::HashAndValueAndSeq;
-    use crate::vwmap;
-    use block_helpers::{slearn2, spredict2};
-    use block_misc::{Observe};
+    use block_helpers::slearn2;
+    use block_misc::Observe;
     use crate::assert_epsilon;
 
     fn fb_vec() -> feature_buffer::FeatureBuffer {
@@ -159,8 +147,8 @@
         let mut mi = model_instance::ModelInstance::new_empty().unwrap();
         let mut bg = BlockGraph::new();
         let input_block = block_misc::new_const_block(&mut bg, vec![2.0]).unwrap();
-        let leaky_relu_block = new_leaky_relu_block(&mut bg, &mi, input_block).unwrap();
-        let observe_block = block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
+        let leaky_relu_block = new_leaky_relu_block(&mut bg, input_block).unwrap();
+        block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
         bg.finalize();
         bg.allocate_and_init_weights(&mi);
@@ -172,11 +160,11 @@
     }
 
     fn test_simple_negative() {
-        let mut mi = model_instance::ModelInstance::new_empty().unwrap();
+        let mi = model_instance::ModelInstance::new_empty().unwrap();
         let mut bg = BlockGraph::new();
         let input_block = block_misc::new_const_block(&mut bg, vec![-2.0]).unwrap();
-        let leaky_relu_block = new_leaky_relu_block(&mut bg, &mi, input_block).unwrap();
-        let observe_block = block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
+        let leaky_relu_block = new_leaky_relu_block(&mut bg, input_block).unwrap();
+        block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
         bg.finalize();
         bg.allocate_and_init_weights(&mi);

diff --git a/src/block_relu.rs b/src/block_relu.rs
index b3c8cf1b..94e3da80 100644
--- a/src/block_relu.rs
+++ b/src/block_relu.rs
@@ -151,7 +151,7 @@ mod tests {
     use crate::feature_buffer::HashAndValueAndSeq;
     use crate::vwmap;
     use block_helpers::{slearn2, spredict2};
-    use block_misc::{Observe};
+    use block_misc::Observe;
     use crate::assert_epsilon;
 
     fn fb_vec() -> feature_buffer::FeatureBuffer {
diff --git a/src/block_sigmoid.rs b/src/block_sigmoid.rs
index d3beb683..fa389def 100644
--- a/src/block_sigmoid.rs
+++ b/src/block_sigmoid.rs
@@ -1,21 +1,14 @@
 use std::any::Any;
-use std::io;
-use merand48::*;
-use core::arch::x86_64::*;
 use std::error::Error;
-use std::mem::{self, MaybeUninit};
+use libm::expf;
 
-use crate::optimizer;
 use crate::regressor;
 use crate::model_instance;
 use crate::feature_buffer;
 use crate::port_buffer;
-use crate::consts;
 use crate::block_helpers;
 use crate::graph;
-use optimizer::OptimizerTrait;
 use regressor::BlockTrait;
-use block_helpers::{Weight, WeightAndOptimizerData};
 use crate::graph::{BlockGraph};
 
 pub struct BlockSigmoid {
@@ -25,12 +18,11 @@
 }
 
 pub fn new_sigmoid_block(bg: &mut graph::BlockGraph,
-                         mi: &model_instance::ModelInstance,
                          input: graph::BlockPtrOutput,
 ) -> Result<graph::BlockPtrOutput, Box<dyn Error>> {
     let num_inputs = bg.get_num_output_values(vec![&input]);
     assert!(num_inputs != 0);
-    let mut block = Box::new(BlockSigmoid {
+    let block = Box::new(BlockSigmoid {
         output_offset: usize::MAX,
         input_offset: usize::MAX,
         num_inputs: num_inputs,
@@ -46,7 +38,7 @@ impl BlockTrait for BlockSigmoid
         self
     }
 
-    fn allocate_and_init_weights(&mut self, mi: &model_instance::ModelInstance) {}
+    fn allocate_and_init_weights(&mut self, _mi: &model_instance::ModelInstance) {}
 
     fn get_num_output_slots(&self) -> usize { 1 }
@@ -82,13 +74,13 @@ impl BlockTrait for BlockSigmoid
 
                     // for now doing the actual slow sigmoid computation. once we establish a baseline,
                     // we can replace with a fast approximation or a lookup table
-                    if x < 0. {
-                        let epx = f32::powf(e, x);
+                    if x < 0.0 {
+                        let epx = expf(x);
                         let s = epx / (1.0 + epx);
                         *pb.tape.get_unchecked_mut(self.output_offset + i) = s;
                         *pb.tape.get_unchecked_mut(self.input_offset + i) = s * (1.0 - s);
                     } else {
-                        let s = 1.0 / (1.0 + f32::powf(e, -x));
+                        let s = 1.0 / (1.0 + expf(-x));
                         *pb.tape.get_unchecked_mut(self.output_offset + i) = s;
                         *pb.tape.get_unchecked_mut(self.input_offset + i) = s * (1.0 - s);
                     }
@@ -116,7 +108,7 @@ impl BlockTrait for BlockSigmoid
         unsafe {
             for i in 0..self.num_inputs as usize {
                 let x = *pb.tape.get_unchecked_mut(self.input_offset + i);
-                *pb.tape.get_unchecked_mut(self.output_offset + i) = 1.0 / (1.0 + f32::powf(e, -x));
+                *pb.tape.get_unchecked_mut(self.output_offset + i) = 1.0 / (1.0 + expf(-x));
             }
             block_helpers::forward(further_blocks, fb, pb);
         } // unsafe end
@@ -129,10 +121,8 @@ mod tests {
     use super::*;
     use crate::block_misc;
     use crate::feature_buffer;
-    use crate::feature_buffer::HashAndValueAndSeq;
-    use crate::vwmap;
-    use block_helpers::{slearn2, spredict2};
-    use block_misc::{Observe};
+    use block_helpers::slearn2;
+    use block_misc::Observe;
     use crate::assert_epsilon;
 
     fn fb_vec() -> feature_buffer::FeatureBuffer {
@@ -152,8 +142,8 @@
         let mut mi = model_instance::ModelInstance::new_empty().unwrap();
         let mut bg = BlockGraph::new();
         let input_block = block_misc::new_const_block(&mut bg, vec![2.0]).unwrap();
-        let sigmoid_block = new_sigmoid_block(&mut bg, &mi, input_block).unwrap();
-        let observe_block = block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
+        let sigmoid_block = new_sigmoid_block(&mut bg, input_block).unwrap();
+        block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
         bg.finalize();
         bg.allocate_and_init_weights(&mi);
@@ -165,11 +155,11 @@
     }
 
     fn test_simple_negative() {
-        let mut mi = model_instance::ModelInstance::new_empty().unwrap();
+        let mi = model_instance::ModelInstance::new_empty().unwrap();
         let mut bg = BlockGraph::new();
         let input_block = block_misc::new_const_block(&mut bg, vec![-2.0]).unwrap();
-        let sigmoid_block = new_sigmoid_block(&mut bg, &mi, input_block).unwrap();
-        let observe_block = block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
+        let sigmoid_block = new_sigmoid_block(&mut bg, input_block).unwrap();
+        block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
         bg.finalize();
         bg.allocate_and_init_weights(&mi);
diff --git a/src/block_tanh.rs b/src/block_tanh.rs
index 798a408c..a334d35b 100644
--- a/src/block_tanh.rs
+++ b/src/block_tanh.rs
@@ -1,23 +1,15 @@
 use std::any::Any;
-use std::io;
-use merand48::*;
-use core::arch::x86_64::*;
 use std::error::Error;
-use std::mem::{self, MaybeUninit};
+use libm::tanhf;
 
-use crate::optimizer;
 use crate::regressor;
 use crate::model_instance;
 use crate::feature_buffer;
 use crate::port_buffer;
-use crate::consts;
 use crate::block_helpers;
 use crate::graph;
-use optimizer::OptimizerTrait;
 use regressor::BlockTrait;
-use block_helpers::{Weight, WeightAndOptimizerData};
 use crate::graph::{BlockGraph};
-use crate::libm::tanhf;
 
 
 pub struct BlockTanh {
@@ -28,12 +20,11 @@
 
 pub fn new_tanh_block( bg: &mut graph::BlockGraph,
-                       mi: &model_instance::ModelInstance,
                        input: graph::BlockPtrOutput
 ) -> Result<graph::BlockPtrOutput, Box<dyn Error>> {
     let num_inputs = bg.get_num_output_values(vec![&input]);
     assert!(num_inputs != 0);
-    let mut block = Box::new(BlockTanh {
+    let block = Box::new(BlockTanh {
         output_offset: usize::MAX,
         input_offset: usize::MAX,
         num_inputs: num_inputs,
@@ -50,7 +41,7 @@ impl BlockTrait for BlockTanh
         self
     }
 
-    fn allocate_and_init_weights(&mut self, mi: &model_instance::ModelInstance) {
+    fn allocate_and_init_weights(&mut self, _mi: &model_instance::ModelInstance) {
     }
 
     fn get_num_output_slots(&self) -> usize {1}
@@ -129,10 +120,8 @@ mod tests {
     use super::*;
     use crate::block_misc;
     use crate::feature_buffer;
-    use crate::feature_buffer::HashAndValueAndSeq;
-    use crate::vwmap;
-    use block_helpers::{slearn2, spredict2};
-    use block_misc::{Observe};
+    use block_helpers::slearn2;
+    use block_misc::Observe;
     use crate::assert_epsilon;
 
     fn fb_vec() -> feature_buffer::FeatureBuffer {
@@ -152,8 +141,8 @@
         let mut mi = model_instance::ModelInstance::new_empty().unwrap();
         let mut bg = BlockGraph::new();
         let input_block = block_misc::new_const_block(&mut bg, vec![2.0]).unwrap();
-        let tanh_block = new_tanh_block(&mut bg, &mi, input_block).unwrap();
-        let observe_block = block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
+        let tanh_block = new_tanh_block(&mut bg, input_block).unwrap();
+        block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
         bg.finalize();
         bg.allocate_and_init_weights(&mi);
@@ -164,11 +153,11 @@
         assert_epsilon!(slearn2 (&mut bg, &fb, &mut pb, true), 2.0); // tanh desnt learn
     }
 
     fn test_simple_negative() {
-        let mut mi = model_instance::ModelInstance::new_empty().unwrap();
+        let mi = model_instance::ModelInstance::new_empty().unwrap();
         let mut bg = BlockGraph::new();
         let input_block = block_misc::new_const_block(&mut bg, vec![-2.0]).unwrap();
-        let tanh_block = new_tanh_block(&mut bg, &mi, input_block).unwrap();
-        let observe_block = block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
+        let tanh_block = new_tanh_block(&mut bg, input_block).unwrap();
+        block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
         bg.finalize();
         bg.allocate_and_init_weights(&mi);
diff --git a/src/regressor.rs b/src/regressor.rs
index 45b66405..a1e2f403 100644
--- a/src/regressor.rs
+++ b/src/regressor.rs
@@ -210,20 +210,21 @@ impl Regressor {
             }
 
             match activation {
+                NNActivation::None => { },
                 NNActivation::Relu => {
                     output = block_relu::new_relu_block(&mut bg, &mi, output).unwrap();
                     println!("Relu layer");
                 },
                 NNActivation::LeakyRelu => {
-                    output = block_leaky_relu::new_leaky_rely_block(&mut bg, &mi, output).unwrap();
+                    output = block_leaky_relu::new_leaky_relu_block(&mut bg, output).unwrap();
                     println!("LeakyRelu layer");
                 },
                 NNActivation::Tanh => {
-                    output = block_tanh::new_tanh_block(&mut bg, &mi, output).unwrap();
+                    output = block_tanh::new_tanh_block(&mut bg, output).unwrap();
                     println!("Tanh layer");
                 },
                 NNActivation::Sigmoid => {
-                    output = block_sigmoid::new_sigmoid_block(&mut bg, &mi, output).unwrap();
+                    output = block_sigmoid::new_sigmoid_block(&mut bg, output).unwrap();
                     println!("Sigmoid layer");
                 }
             }
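
Note on the sigmoid hunks above: the patch replaces the old `f32::powf(e, x)` calls with `libm::expf`, and the rewritten branch evaluates `expf` only on a non-positive argument, so the intermediate exponential stays in (0, 1] and cannot overflow an f32; both branches compute the same mathematical value. The gradient needs no second exponential either, since d/dx sigmoid(x) = s * (1 - s), which is exactly what the block writes back to the input tape. A minimal standalone sketch of the same pattern, assuming only the `libm` crate; the `stable_sigmoid` name and the test values are illustrative, not part of the patch:

    use libm::expf;

    // Numerically stable logistic function: `expf` only ever sees a
    // non-positive argument, so its result stays in (0, 1] and cannot
    // overflow, no matter how large |x| gets.
    fn stable_sigmoid(x: f32) -> f32 {
        if x < 0.0 {
            let epx = expf(x); // x < 0.0, so epx is in (0, 1)
            epx / (1.0 + epx)
        } else {
            1.0 / (1.0 + expf(-x)) // -x <= 0.0, same bounded range
        }
    }

    fn main() {
        for x in [-100.0f32, -2.0, 0.0, 2.0, 100.0] {
            let s = stable_sigmoid(x);
            let ds = s * (1.0 - s); // the gradient the block stores on its input tape
            println!("x = {x:>6}: sigmoid = {s:.6}, d/dx = {ds:.6}");
        }
    }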