Add a padding operator. #95

Merged: 11 commits, Jul 17, 2021

Changes from 5 commits
3 changes: 3 additions & 0 deletions .gitignore
@@ -0,0 +1,3 @@
build/
experiments/
smaug/operators/padding_op_test.h
Member

Why is this file included here?

Member

Yes, please remove this file from this PR. You are welcome to send a separate PR with a .gitignore if you like, but if so, it should only contain build products, not other code (like padding_op_test.h).

Contributor Author

Mainly to facilitate my development. I think the first two lines would be beneficial to this repo (the third line is meaningless now), but I can delete the file if you think it doesn't make sense.

Member

See #96

1 change: 1 addition & 0 deletions make/Makefile.common
@@ -82,6 +82,7 @@ TESTS = smaug/core/tensor_test.cpp \
smaug/operators/split_op_test.cpp \
smaug/operators/reshape_op_test.cpp \
smaug/operators/repeat_op_test.cpp \
smaug/operators/padding_op_test.cpp \
smaug/operators/control_flow_ops_test.cpp \
smaug/operators/smv/smv_convolution_tiling_test.cpp \
smaug/operators/smv/smv_convolution_op_test.cpp \
45 changes: 24 additions & 21 deletions smaug/core/backend.cpp
@@ -1,38 +1,39 @@
#include "smaug/core/backend.h"
#include "smaug/operators/batch_norm_op.h"
#include "smaug/operators/concat_op.h"
Member

Code rearrangement is generally confusing :) Moving forward, please put this kind of change in a separate PR.

Member

I definitely appreciate you sorting all the includes in alphabetical order! It's certainly cleaner than before. But yes, refactoring and code cleanups should be done separately when they're not related to the main purpose of the PR.

Contributor Author

I'll fix this.

#include "smaug/operators/control_flow_ops.h"
#include "smaug/operators/convolution_op.h"
#include "smaug/operators/data_op.h"
#include "smaug/operators/depthwise_convolution_op.h"
#include "smaug/operators/eltwise_add_op.h"
#include "smaug/operators/eltwise_mul_op.h"
#include "smaug/operators/less_op.h"
#include "smaug/operators/greater_op.h"
#include "smaug/operators/control_flow_ops.h"
#include "smaug/operators/elu_op.h"
#include "smaug/operators/greater_op.h"
#include "smaug/operators/inner_product_op.h"
#include "smaug/operators/less_op.h"
#include "smaug/operators/padding_op.h"
#include "smaug/operators/pooling_op.h"
#include "smaug/operators/relu_op.h"
#include "smaug/operators/reorder_op.h"
#include "smaug/operators/concat_op.h"
#include "smaug/operators/split_op.h"
#include "smaug/operators/reshape_op.h"
#include "smaug/operators/repeat_op.h"
#include "smaug/operators/reshape_op.h"
#include "smaug/operators/sigmoid_op.h"
#include "smaug/operators/softmax_op.h"
#include "smaug/operators/tanh_op.h"
#include "smaug/operators/smv/smv_batch_norm_op.h"
#include "smaug/operators/smv/smv_convolution_op.h"
#include "smaug/operators/smv/smv_eltwise_add_op.h"
#include "smaug/operators/smv/smv_eltwise_mul_op.h"
#include "smaug/operators/smv/smv_elu_op.h"
#include "smaug/operators/smv/smv_greater_op.h"
#include "smaug/operators/smv/smv_inner_product_op.h"
#include "smaug/operators/smv/smv_less_op.h"
#include "smaug/operators/smv/smv_pooling_op.h"
#include "smaug/operators/smv/smv_batch_norm_op.h"
#include "smaug/operators/smv/smv_relu_op.h"
#include "smaug/operators/smv/smv_elu_op.h"
#include "smaug/operators/smv/smv_tanh_op.h"
#include "smaug/operators/smv/smv_sigmoid_op.h"
#include "smaug/operators/smv/smv_softmax_op.h"
#include "smaug/operators/smv/smv_eltwise_add_op.h"
#include "smaug/operators/smv/smv_eltwise_mul_op.h"
#include "smaug/operators/smv/smv_less_op.h"
#include "smaug/operators/smv/smv_greater_op.h"
#include "smaug/operators/smv/smv_tanh_op.h"
#include "smaug/operators/softmax_op.h"
Member

(Also, if this was meant to sort the header includes, these three should not be at the bottom :] )

#include "smaug/operators/split_op.h"
#include "smaug/operators/tanh_op.h"

namespace smaug {

@@ -79,6 +80,7 @@ DEF_CREATE_OP(EluOp, ReferenceBackend)
DEF_CREATE_OP(SeluOp, ReferenceBackend)
DEF_CREATE_OP(TanhOp, ReferenceBackend)
DEF_CREATE_OP(HardTanhOp, ReferenceBackend)
DEF_CREATE_OP(PaddingOp, ReferenceBackend)

DEF_CREATE_SMV_OP(ConvolutionOp)
DEF_CREATE_SMV_OP(InnerProductOp)
@@ -108,13 +110,15 @@ DEF_CREATE_OP(RepeatOp, SmvBackend)
DEF_CREATE_OP(FlattenOp, SmvBackend)
DEF_CREATE_OP(SwitchOp, SmvBackend)
DEF_CREATE_OP(MergeOp, SmvBackend)
DEF_CREATE_OP(PaddingOp, SmvBackend)

// for simple tracing.
namespace ref {
const unsigned kConvolutionHw = 0x0001;
const unsigned kInnerProductHw = 0x0002;
const unsigned kEltwiseOpHw = 0x0003;
const unsigned kBatchNormHw = 0x0004;
const unsigned kPoolingHw = 0x0005;
const unsigned kConvolutionHw = 0x0001; // 0x0001;
Member

I can't remember exactly, but we do want these accelerator blocks to be traced with different accelerator IDs. Deferring to @xyzsam for his perspective.

Member

Yes, leave this unchanged here; there is an argument for both sides but regardless, it should not be done in this PR.

Contributor Author

This is based on advice @xyzsam gave for another problem, but it is not relevant to this PR. I'll delete it.

const unsigned kInnerProductHw = 0x0001; // 0x0002;
const unsigned kEltwiseOpHw = 0x0001; // 0x0003;
const unsigned kBatchNormHw = 0x0001; // 0x0004;
const unsigned kPoolingHw = 0x0001; // 0x0005;
} // namespace ref

namespace smv {
@@ -140,5 +144,4 @@ float* spad1;
float* spad2;
} // namespace smv


} // namespace smaug
6 changes: 4 additions & 2 deletions smaug/core/backend.h
@@ -58,6 +58,8 @@ template <typename Backend> class EluOp;
template <typename Backend> class SeluOp;
template <typename Backend> class TanhOp;
template <typename Backend> class HardTanhOp;
template <typename Backend> class PaddingOp;

#endif

/**
@@ -123,9 +125,9 @@ class ReferenceBackend {
DECL_CREATE_OP(SeluOp);
DECL_CREATE_OP(TanhOp);
DECL_CREATE_OP(HardTanhOp);
DECL_CREATE_OP(PaddingOp);

#undef DECL_CREATE_OP

};

/**
@@ -238,10 +240,10 @@ class SmvBackend {
DECL_CREATE_OP(FlattenOp);
DECL_CREATE_OP(SwitchOp);
DECL_CREATE_OP(MergeOp);
DECL_CREATE_OP(PaddingOp);

#undef DECL_SMV_OP
#undef DECL_CREATE_OP

};

} // namespace smaug
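For readers unfamiliar with the factory macros touched above: DECL_CREATE_OP(PaddingOp) in backend.h and DEF_CREATE_OP(PaddingOp, ReferenceBackend) / DEF_CREATE_OP(PaddingOp, SmvBackend) in backend.cpp register the new operator with each backend. The macro bodies are not part of this diff, but they presumably expand to the declaration and definition of a static factory along these lines (a hedged sketch, not the actual SMAUG code):

// Hypothetical expansion of the registration macros for PaddingOp; the real
// macro bodies are not shown in this PR, so treat this only as a sketch.

// In backend.h, inside class ReferenceBackend, DECL_CREATE_OP(PaddingOp);
// presumably declares a static factory:
static PaddingOp<ReferenceBackend>* createPaddingOp(
        const std::string& name, Workspace* workspace);

// In backend.cpp, DEF_CREATE_OP(PaddingOp, ReferenceBackend) presumably
// defines it to construct the operator and hand it back:
PaddingOp<ReferenceBackend>* ReferenceBackend::createPaddingOp(
        const std::string& name, Workspace* workspace) {
    return new PaddingOp<ReferenceBackend>(name, workspace);
}

Whatever the exact expansion, this pairing is what lets network_builder.cpp below call Backend::createPaddingOp(name, workspace) generically for either backend.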
51 changes: 28 additions & 23 deletions smaug/core/network_builder.cpp
@@ -1,56 +1,57 @@
#include <iostream>
Member

Again, it'd be better to put the code rearrangement into a separate PR.

Contributor Author

I'll fix this.

#include <fstream>
#include <fcntl.h>
#include <fstream>
#include <iostream>

#include <google/protobuf/text_format.h>
#include <google/protobuf/io/zero_copy_stream_impl.h>
#include <google/protobuf/text_format.h>

#include "smaug/core/backend.h"
#include "smaug/core/tensor.h"
#include "smaug/core/graph.pb.h"
#include "smaug/core/network.h"
#include "smaug/core/network_builder.h"
#include "smaug/core/workspace.h"
#include "smaug/core/graph.pb.h"
#include "smaug/core/node.pb.h"
#include "smaug/core/tensor.h"
#include "smaug/core/tensor.pb.h"
#include "smaug/core/types.pb.h"
#include "smaug/operators/common.h"
#include "smaug/core/workspace.h"
#include "smaug/operators/batch_norm_op.h"
#include "smaug/operators/common.h"
#include "smaug/operators/concat_op.h"
#include "smaug/operators/control_flow_ops.h"
#include "smaug/operators/convolution_op.h"
#include "smaug/operators/data_op.h"
#include "smaug/operators/depthwise_convolution_op.h"
#include "smaug/operators/eltwise_add_op.h"
#include "smaug/operators/eltwise_mul_op.h"
#include "smaug/operators/less_op.h"
#include "smaug/operators/greater_op.h"
#include "smaug/operators/control_flow_ops.h"
#include "smaug/operators/elu_op.h"
#include "smaug/operators/greater_op.h"
#include "smaug/operators/inner_product_op.h"
#include "smaug/operators/less_op.h"
#include "smaug/operators/padding_op.h"
#include "smaug/operators/pooling_op.h"
#include "smaug/operators/relu_op.h"
#include "smaug/operators/reorder_op.h"
#include "smaug/operators/concat_op.h"
#include "smaug/operators/split_op.h"
#include "smaug/operators/reshape_op.h"
#include "smaug/operators/repeat_op.h"
#include "smaug/operators/reshape_op.h"
#include "smaug/operators/sigmoid_op.h"
#include "smaug/operators/softmax_op.h"
#include "smaug/operators/tanh_op.h"
#include "smaug/operators/smv/smv_batch_norm_op.h"
#include "smaug/operators/smv/smv_convolution_op.h"
#include "smaug/operators/smv/smv_eltwise_add_op.h"
#include "smaug/operators/smv/smv_eltwise_mul_op.h"
#include "smaug/operators/smv/smv_elu_op.h"
#include "smaug/operators/smv/smv_greater_op.h"
#include "smaug/operators/smv/smv_inner_product_op.h"
#include "smaug/operators/smv/smv_less_op.h"
#include "smaug/operators/smv/smv_pooling_op.h"
#include "smaug/operators/smv/smv_batch_norm_op.h"
#include "smaug/operators/smv/smv_relu_op.h"
#include "smaug/operators/smv/smv_elu_op.h"
#include "smaug/operators/smv/smv_tanh_op.h"
#include "smaug/operators/smv/smv_sigmoid_op.h"
#include "smaug/operators/smv/smv_softmax_op.h"
#include "smaug/operators/smv/smv_eltwise_add_op.h"
#include "smaug/operators/smv/smv_eltwise_mul_op.h"
#include "smaug/operators/smv/smv_less_op.h"
#include "smaug/operators/smv/smv_greater_op.h"
#include "smaug/utility/utils.h"
#include "smaug/operators/smv/smv_tanh_op.h"
#include "smaug/operators/softmax_op.h"
#include "smaug/operators/split_op.h"
#include "smaug/operators/tanh_op.h"
#include "smaug/utility/debug_stream.h"
#include "smaug/utility/utils.h"

using namespace smaug;
using namespace std;
@@ -263,6 +264,10 @@ static void createAndAddOperator(const NodeProto& node,
} else if (type == OpType::Tanh) {
auto op = Backend::createTanhOp(name, workspace);
network->addOperator(op);
} else if (type == OpType::Padding) { // how to set this
auto op = Backend::createPaddingOp(name, workspace);
op->setPadder(node.params().padding_params().padding_size());
network->addOperator(op);
} else if (type == OpType::HardTanh) {
auto op = Backend::createHardTanhOp(name, workspace);
network->addOperator(op);
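The new OpType::Padding branch above is the only place in this PR that shows how a PaddingOp is created and configured. A minimal C++ sketch of that flow outside the network builder, using only the calls visible in this diff (createPaddingOp, setPadder, addOperator); the function name and the padding value of 1 are illustrative:

#include "smaug/core/backend.h"
#include "smaug/core/network.h"
#include "smaug/core/workspace.h"
#include "smaug/operators/padding_op.h"

// Illustrative helper (not part of this PR): build a padding operator the same
// way createAndAddOperator() does for OpType::Padding.
void addPaddingOpExample(smaug::Network* network, smaug::Workspace* workspace) {
    // The backend factory added by this PR.
    auto* op = smaug::ReferenceBackend::createPaddingOp("pad0", workspace);
    // Normally taken from node.params().padding_params().padding_size().
    op->setPadder(1);
    network->addOperator(op);
}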
5 changes: 5 additions & 0 deletions smaug/core/node.proto
@@ -15,6 +15,10 @@ message PoolParams {
repeated int32 pool_size = 2;
}

message PaddingParams {
int32 padding_size = 1;
}

message ConcatParams {
int32 concat_axis = 1;
}
@@ -52,6 +56,7 @@ message Params {
PoolParams pool_params = 2;
ConcatParams concat_params = 4;
SplitParams split_params = 5;
PaddingParams padding_params = 6;
}
ActivationParams act_params = 3;
}
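For context on the proto change: PaddingParams carries a single padding_size, and Params gains a padding_params field (tag 6) that the network builder reads via node.params().padding_params().padding_size(). With the generated C++ bindings, a graph exporter could set it roughly as follows (illustrative sketch; only the field and message names come from this diff):

#include "smaug/core/node.pb.h"

// Illustrative only: attach a padding size to a node's parameters and read it
// back the same way network_builder.cpp does in this PR.
void setPaddingSizeExample(smaug::NodeProto* node) {
    node->mutable_params()->mutable_padding_params()->set_padding_size(1);
    int size = node->params().padding_params().padding_size();
    (void)size;  // The builder passes this value to the op's setPadder().
}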
121 changes: 61 additions & 60 deletions smaug/core/types.proto
@@ -3,79 +3,80 @@ syntax = "proto3";
package smaug;

enum DataType {
UnknownDataType = 0;
Member

Can we restore the indent? If you really want to change this, please change all the proto files in another PR.

Member

+1

Contributor Author

Of course.

Int32 = 1;
Int64 = 2;
Float16 = 3;
Float32 = 4;
Float64 = 5;
Bool = 6;
UnknownDataType = 0;
Int32 = 1;
Int64 = 2;
Float16 = 3;
Float32 = 4;
Float64 = 5;
Bool = 6;
}

enum DataLayout {
option allow_alias = true;
UnknownLayout = 0;
NCHW = 1;
NHWC = 2;
NC = 4;
CN = 8;
NCT = 16;
NTC = 32;
N = 64;
X = 127; // Elementwise
EndDataLayout = 64;
option allow_alias = true;
UnknownLayout = 0;
NCHW = 1;
NHWC = 2;
NC = 4;
CN = 8;
NCT = 16;
NTC = 32;
N = 64;
X = 127; // Elementwise
EndDataLayout = 64;
}

enum DataStorageFormat {
UnknownStorageFormat = 0;
Uncompressed = 1;
CSR = 2;
PackedCSR = 3;
UncompressedHalfPrecision = 4;
UnknownStorageFormat = 0;
Uncompressed = 1;
CSR = 2;
PackedCSR = 3;
UncompressedHalfPrecision = 4;
}

enum OpType {
UnknownOp = 0;
Convolution3d = 1;
ConvolutionDepthwise = 2;
MaxPooling = 3;
AveragePooling = 4;
InnerProduct = 5;
BatchNorm = 6;
Data = 7;
ReLU = 8;
LReLU = 9;
ELU = 10;
SELU = 11;
Tanh = 12;
HardTanh = 13;
Sigmoid = 14;
Softmax = 15;
EltwiseAdd = 16;
Reorder = 17;
EltwiseMul = 18;
Concat = 19;
Split = 20;
Reshape = 21;
Repeat = 22;
Less = 23;
LessEqual = 24;
Greater = 25;
GreaterEqual = 26;
Switch = 27;
Merge = 28;
UnknownOp = 0;
Convolution3d = 1;
ConvolutionDepthwise = 2;
MaxPooling = 3;
AveragePooling = 4;
InnerProduct = 5;
BatchNorm = 6;
Data = 7;
ReLU = 8;
LReLU = 9;
ELU = 10;
SELU = 11;
Tanh = 12;
HardTanh = 13;
Sigmoid = 14;
Softmax = 15;
EltwiseAdd = 16;
Reorder = 17;
EltwiseMul = 18;
Concat = 19;
Split = 20;
Reshape = 21;
Repeat = 22;
Less = 23;
LessEqual = 24;
Greater = 25;
GreaterEqual = 26;
Switch = 27;
Merge = 28;
Padding = 29;
}

enum PaddingType {
UnknownPadding = 0;
SamePadding = 1;
ValidPadding = 2;
UnknownPadding = 0;
SamePadding = 1;
ValidPadding = 2;
}

enum HostMemoryAccessPolicy {
UnknownMemoryPolicy = 0;
AllDma = 1;
AllAcp = 2;
AllCache = 3;
AllAcpWithDmaForWeights = 4;
UnknownMemoryPolicy = 0;
AllDma = 1;
AllAcp = 2;
AllCache = 3;
AllAcpWithDmaForWeights = 4;
}