make cargo build happy
yonatankarni committed Jan 2, 2023
1 parent 5f085f4 commit 4426515
Showing 5 changed files with 38 additions and 70 deletions.
30 changes: 9 additions & 21 deletions src/block_leaky_relu.rs
@@ -1,22 +1,13 @@
use std::any::Any;
use std::io;
use merand48::*;
use core::arch::x86_64::*;
use std::error::Error;
use std::mem::{self, MaybeUninit};


use crate::optimizer;
use crate::regressor;
use crate::model_instance;
use crate::feature_buffer;
use crate::port_buffer;
use crate::consts;
use crate::block_helpers;
use crate::graph;
use optimizer::OptimizerTrait;
use regressor::BlockTrait;
use block_helpers::{Weight, WeightAndOptimizerData};
use crate::graph::{BlockGraph};


@@ -29,12 +20,11 @@ pub struct BlockLeakyRELU


pub fn new_leaky_relu_block(bg: &mut BlockGraph,
mi: &model_instance::ModelInstance,
input: graph::BlockPtrOutput,
) -> Result<graph::BlockPtrOutput, Box<dyn Error>> {
let num_inputs = bg.get_num_output_values(vec![&input]);
assert!(num_inputs != 0);
let mut block = Box::new(BlockLeakyRELU {
let block = Box::new(BlockLeakyRELU {
output_offset: usize::MAX,
input_offset: usize::MAX,
num_inputs: num_inputs,
@@ -110,7 +100,7 @@ impl BlockTrait for BlockLeakyRELU
}


fn allocate_and_init_weights(&mut self, mi: &model_instance::ModelInstance) {}
fn allocate_and_init_weights(&mut self, _mi: &model_instance::ModelInstance) {}

fn get_num_output_values(&self, output: graph::OutputSlot) -> usize {
assert!(output.get_output_index() == 0);
@@ -136,10 +126,8 @@ mod tests {
use super::*;
use crate::block_misc;
use crate::feature_buffer;
use crate::feature_buffer::HashAndValueAndSeq;
use crate::vwmap;
use block_helpers::{slearn2, spredict2};
use block_misc::{Observe};
use block_helpers::slearn2;
use block_misc::Observe;
use crate::assert_epsilon;

fn fb_vec() -> feature_buffer::FeatureBuffer {
@@ -159,8 +147,8 @@
let mut mi = model_instance::ModelInstance::new_empty().unwrap();
let mut bg = BlockGraph::new();
let input_block = block_misc::new_const_block(&mut bg, vec![2.0]).unwrap();
let leaky_relu_block = new_leaky_relu_block(&mut bg, &mi, input_block).unwrap();
let observe_block = block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
let leaky_relu_block = new_leaky_relu_block(&mut bg, input_block).unwrap();
block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
bg.finalize();
bg.allocate_and_init_weights(&mi);

@@ -172,11 +160,11 @@
}

fn test_simple_negative() {
let mut mi = model_instance::ModelInstance::new_empty().unwrap();
let mi = model_instance::ModelInstance::new_empty().unwrap();
let mut bg = BlockGraph::new();
let input_block = block_misc::new_const_block(&mut bg, vec![-2.0]).unwrap();
let leaky_relu_block = new_leaky_relu_block(&mut bg, &mi, input_block).unwrap();
let observe_block = block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
let leaky_relu_block = new_leaky_relu_block(&mut bg, input_block).unwrap();
block_misc::new_observe_block(&mut bg, leaky_relu_block, Observe::Forward, Some(1.0)).unwrap();
bg.finalize();
bg.allocate_and_init_weights(&mi);

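The hunks above only touch the constructor and imports; the forward math of BlockLeakyRELU itself is not shown in this commit. As a reference point, here is a minimal sketch of a leaky ReLU and its gradient, assuming a fixed negative slope of 0.01 (the slope the block actually uses is not visible in this diff):

// Minimal leaky-ReLU sketch. The 0.01 slope is an assumption for
// illustration; block_leaky_relu.rs may use a different constant.
fn leaky_relu(x: f32) -> f32 {
    if x >= 0.0 { x } else { 0.01 * x }
}

// Gradient used during backprop: 1 for positive inputs, the slope otherwise.
fn leaky_relu_grad(x: f32) -> f32 {
    if x >= 0.0 { 1.0 } else { 0.01 }
}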
2 changes: 1 addition & 1 deletion src/block_relu.rs
@@ -151,7 +151,7 @@ mod tests {
use crate::feature_buffer::HashAndValueAndSeq;
use crate::vwmap;
use block_helpers::{slearn2, spredict2};
use block_misc::{Observe};
use block_misc::Observe;
use crate::assert_epsilon;

fn fb_vec() -> feature_buffer::FeatureBuffer {
38 changes: 14 additions & 24 deletions src/block_sigmoid.rs
@@ -1,21 +1,14 @@
use std::any::Any;
use std::io;
use merand48::*;
use core::arch::x86_64::*;
use std::error::Error;
use std::mem::{self, MaybeUninit};
use libm::expf;

use crate::optimizer;
use crate::regressor;
use crate::model_instance;
use crate::feature_buffer;
use crate::port_buffer;
use crate::consts;
use crate::block_helpers;
use crate::graph;
use optimizer::OptimizerTrait;
use regressor::BlockTrait;
use block_helpers::{Weight, WeightAndOptimizerData};
use crate::graph::{BlockGraph};

pub struct BlockSigmoid {
@@ -25,12 +18,11 @@ pub struct BlockSigmoid
}

pub fn new_sigmoid_block(bg: &mut graph::BlockGraph,
mi: &model_instance::ModelInstance,
input: graph::BlockPtrOutput,
) -> Result<graph::BlockPtrOutput, Box<dyn Error>> {
let num_inputs = bg.get_num_output_values(vec![&input]);
assert!(num_inputs != 0);
let mut block = Box::new(BlockSigmoid {
let block = Box::new(BlockSigmoid {
output_offset: usize::MAX,
input_offset: usize::MAX,
num_inputs: num_inputs,
@@ -46,7 +38,7 @@ impl BlockTrait for BlockSigmoid
self
}

fn allocate_and_init_weights(&mut self, mi: &model_instance::ModelInstance) {}
fn allocate_and_init_weights(&mut self, _mi: &model_instance::ModelInstance) {}

fn get_num_output_slots(&self) -> usize { 1 }

@@ -82,13 +74,13 @@ impl BlockTrait for BlockSigmoid

// for now doing the actual slow sigmoid computation. once we establish a baseline,
// we can replace with a fast approximation or a lookup table
if x < 0. {
let epx = f32::powf(e, x);
if x < 0.0 {
let epx = expf(x);
let s = epx / (1.0 + epx);
*pb.tape.get_unchecked_mut(self.output_offset + i) = s;
*pb.tape.get_unchecked_mut(self.input_offset + i) = s * (1.0 - s);
} else {
let s = 1.0 / (1.0 + f32::powf(e, -x));
let s = 1.0 / (1.0 + expf(-x));
*pb.tape.get_unchecked_mut(self.output_offset + i) = s;
*pb.tape.get_unchecked_mut(self.input_offset + i) = s * (1.0 - s);
}
@@ -116,7 +108,7 @@ impl BlockTrait for BlockSigmoid
unsafe {
for i in 0..self.num_inputs as usize {
let x = *pb.tape.get_unchecked_mut(self.input_offset + i);
*pb.tape.get_unchecked_mut(self.output_offset + i) = 1.0 / (1.0 + f32::powf(e, -x));
*pb.tape.get_unchecked_mut(self.output_offset + i) = 1.0 / (1.0 + expf(-x));
}
block_helpers::forward(further_blocks, fb, pb);
} // unsafe end
@@ -129,10 +121,8 @@ mod tests {
use super::*;
use crate::block_misc;
use crate::feature_buffer;
use crate::feature_buffer::HashAndValueAndSeq;
use crate::vwmap;
use block_helpers::{slearn2, spredict2};
use block_misc::{Observe};
use block_helpers::slearn2;
use block_misc::Observe;
use crate::assert_epsilon;

fn fb_vec() -> feature_buffer::FeatureBuffer {
@@ -152,8 +142,8 @@
let mut mi = model_instance::ModelInstance::new_empty().unwrap();
let mut bg = BlockGraph::new();
let input_block = block_misc::new_const_block(&mut bg, vec![2.0]).unwrap();
let sigmoid_block = new_sigmoid_block(&mut bg, &mi, input_block).unwrap();
let observe_block = block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
let sigmoid_block = new_sigmoid_block(&mut bg, input_block).unwrap();
block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
bg.finalize();
bg.allocate_and_init_weights(&mi);

@@ -165,11 +155,11 @@
}

fn test_simple_negative() {
let mut mi = model_instance::ModelInstance::new_empty().unwrap();
let mi = model_instance::ModelInstance::new_empty().unwrap();
let mut bg = BlockGraph::new();
let input_block = block_misc::new_const_block(&mut bg, vec![-2.0]).unwrap();
let sigmoid_block = new_sigmoid_block(&mut bg, &mi, input_block).unwrap();
let observe_block = block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
let sigmoid_block = new_sigmoid_block(&mut bg, input_block).unwrap();
block_misc::new_observe_block(&mut bg, sigmoid_block, Observe::Forward, Some(1.0)).unwrap();
bg.finalize();
bg.allocate_and_init_weights(&mi);

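The branch on the sign of x in the hunk above is the standard numerically stable sigmoid: for negative x the block computes exp(x) / (1 + exp(x)), which is algebraically identical to 1 / (1 + exp(-x)) but only ever passes a non-positive argument to expf, so the exponential cannot overflow f32. A self-contained sketch of the same idea:

use libm::expf;

// Numerically stable sigmoid, mirroring the branch structure in
// block_sigmoid.rs: each branch exponentiates a non-positive value.
fn stable_sigmoid(x: f32) -> f32 {
    if x < 0.0 {
        // x < 0 here, so expf(x) is in (0, 1) and cannot overflow.
        let epx = expf(x);
        epx / (1.0 + epx)
    } else {
        // -x <= 0 here, so expf(-x) is in (0, 1].
        1.0 / (1.0 + expf(-x))
    }
}

// The derivative s * (1.0 - s) that the block writes back for backprop,
// expressed in terms of the forward output s.
fn sigmoid_grad(s: f32) -> f32 {
    s * (1.0 - s)
}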
31 changes: 10 additions & 21 deletions src/block_tanh.rs
@@ -1,23 +1,15 @@
use std::any::Any;
use std::io;
use merand48::*;
use core::arch::x86_64::*;
use std::error::Error;
use std::mem::{self, MaybeUninit};
use libm::tanhf;

use crate::optimizer;
use crate::regressor;
use crate::model_instance;
use crate::feature_buffer;
use crate::port_buffer;
use crate::consts;
use crate::block_helpers;
use crate::graph;
use optimizer::OptimizerTrait;
use regressor::BlockTrait;
use block_helpers::{Weight, WeightAndOptimizerData};
use crate::graph::{BlockGraph};
use crate::libm::tanhf;


pub struct BlockTanh {
@@ -28,12 +20,11 @@ pub struct BlockTanh


pub fn new_tanh_block( bg: &mut graph::BlockGraph,
mi: &model_instance::ModelInstance,
input: graph::BlockPtrOutput
) -> Result<graph::BlockPtrOutput, Box<dyn Error>> {
let num_inputs = bg.get_num_output_values(vec![&input]);
assert!(num_inputs != 0);
let mut block = Box::new(BlockTanh {
let block = Box::new(BlockTanh {
output_offset: usize::MAX,
input_offset: usize::MAX,
num_inputs: num_inputs,
@@ -50,7 +41,7 @@ impl BlockTrait for BlockTanh
self
}

fn allocate_and_init_weights(&mut self, mi: &model_instance::ModelInstance) {
fn allocate_and_init_weights(&mut self, _mi: &model_instance::ModelInstance) {
}

fn get_num_output_slots(&self) -> usize {1}
@@ -129,10 +120,8 @@ mod tests {
use super::*;
use crate::block_misc;
use crate::feature_buffer;
use crate::feature_buffer::HashAndValueAndSeq;
use crate::vwmap;
use block_helpers::{slearn2, spredict2};
use block_misc::{Observe};
use block_helpers::slearn2;
use block_misc::Observe;
use crate::assert_epsilon;

fn fb_vec() -> feature_buffer::FeatureBuffer {
@@ -152,8 +141,8 @@
let mut mi = model_instance::ModelInstance::new_empty().unwrap();
let mut bg = BlockGraph::new();
let input_block = block_misc::new_const_block(&mut bg, vec![2.0]).unwrap();
let tanh_block = new_tanh_block(&mut bg, &mi, input_block).unwrap();
let observe_block = block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
let tanh_block = new_tanh_block(&mut bg, input_block).unwrap();
block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
bg.finalize();
bg.allocate_and_init_weights(&mi);

@@ -164,11 +153,11 @@
assert_epsilon!(slearn2 (&mut bg, &fb, &mut pb, true), 2.0); // tanh doesn't learn
}
fn test_simple_negative() {
let mut mi = model_instance::ModelInstance::new_empty().unwrap();
let mi = model_instance::ModelInstance::new_empty().unwrap();
let mut bg = BlockGraph::new();
let input_block = block_misc::new_const_block(&mut bg, vec![-2.0]).unwrap();
let tanh_block = new_tanh_block(&mut bg, &mi, input_block).unwrap();
let observe_block = block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
let tanh_block = new_tanh_block(&mut bg, input_block).unwrap();
block_misc::new_observe_block(&mut bg, tanh_block, Observe::Forward, Some(1.0)).unwrap();
bg.finalize();
bg.allocate_and_init_weights(&mi);

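block_tanh.rs now imports tanhf from the libm crate directly instead of through the broken crate::libm path. For backprop, tanh has the convenient identity d/dx tanh(x) = 1 - tanh(x)^2, so the gradient can be computed from the forward output with a single call to tanhf. A sketch of that identity (how the block actually lays out its tape values is not shown in this diff):

use libm::tanhf;

// Forward pass and gradient in one call: the derivative is expressed in
// terms of the forward output t, so tanhf runs only once per value.
fn tanh_forward_and_grad(x: f32) -> (f32, f32) {
    let t = tanhf(x);
    (t, 1.0 - t * t)
}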
7 changes: 4 additions & 3 deletions src/regressor.rs
@@ -210,20 +210,21 @@ impl Regressor {
}

match activation {
NNActivation::None => { },
NNActivation::Relu => {
output = block_relu::new_relu_block(&mut bg, &mi, output).unwrap();
println!("Relu layer");
},
NNActivation::LeakyRelu => {
output = block_leaky_relu::new_leaky_rely_block(&mut bg, &mi, output).unwrap();
output = block_leaky_relu::new_leaky_relu_block(&mut bg, output).unwrap();
println!("LeakyRelu layer");
},
NNActivation::Tanh => {
output = block_tanh::new_tanh_block(&mut bg, &mi, output).unwrap();
output = block_tanh::new_tanh_block(&mut bg, output).unwrap();
println!("Tanh layer");
},
NNActivation::Sigmoid => {
output = block_sigmoid::new_sigmoid_block(&mut bg, &mi, output).unwrap();
output = block_sigmoid::new_sigmoid_block(&mut bg, output).unwrap();
println!("Sigmoid layer");
}
}
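With this commit the activation-block constructors take only the graph and the input, since these blocks allocate no weights of their own; the &ModelInstance parameter was dead. The updated tests show the calling pattern. Condensed into one sketch (drawn from the test code above, illustrative rather than a verbatim excerpt of the crate):

let mi = model_instance::ModelInstance::new_empty().unwrap();
let mut bg = BlockGraph::new();
let input = block_misc::new_const_block(&mut bg, vec![2.0]).unwrap();
// No &mi argument any more -- activation blocks hold no weights.
let out = new_sigmoid_block(&mut bg, input).unwrap();
block_misc::new_observe_block(&mut bg, out, Observe::Forward, Some(1.0)).unwrap();
bg.finalize();
bg.allocate_and_init_weights(&mi); // still needed for blocks that do own weights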
