
Commit

Remove commented code
daquexian committed Aug 21, 2019
1 parent 1dd7e80 commit 934d215
Showing 3 changed files with 0 additions and 51 deletions.
37 changes: 0 additions & 37 deletions dabnn/fused_binarize_im2col.h
@@ -19,42 +19,6 @@ inline void fused_binarize_im2col(const Mat &im, const int kernel_h,
const int output_w =
(im.w + 2 * pad_w - (dilation_w * (kernel_w - 1) + 1)) / stride_w + 1;

- // Mat temp(1, 1, kernel_h * kernel_w * output_h * output_w * im.c, DataType::Float);
- // char *data_col = static_cast<char *>(temp);
- // int input_y = 0;
- // FORZ(output_y, output_h) {
- //     int input_x = 0;
- //     FORZ(output_x, output_w) {
- //         FORZ(kh, kernel_h) {
- //             int y = input_y - pad_h + kh * dilation_h;
- //             const char *data_im = static_cast<char *>(im.data) +
- //                 y * im.w * im.c * im.elemsize;
- //             FORZ(kw, kernel_w) {
- //                 int x = input_x - pad_w + kw * dilation_w;
- //                 if (y < 0 || y >= im.h || x < 0 || x >= im.w) {
- //                     memset(data_col, 0, im.c * im.elemsize);
- //                 } else {
- //                     memcpy(data_col, data_im + x * im.c * im.elemsize,
- //                            im.c * im.elemsize);
- //                 }
- //                 data_col += im.c * im.elemsize;
- //             }
- //         }
- //         input_x += stride_w;
- //     }
- //     input_y += stride_h;
- // }
- // pack_64(static_cast<float *>(temp.data), col.data, temp.total());
- // if (true) {
- //     Mat temp(1, 1, kernel_h * kernel_w * output_h * output_w * im.c, DataType::Float);
- //     im2col(im, kernel_h, kernel_w, pad_h, pad_w, stride_h, stride_w, dilation_h, dilation_w, temp);
- //     pack_mat(temp, col);
- // } else {
- //     Mat temp(1, 9999999, DataType::Bit);
- //     pack_mat_128_opt(im, temp);
- //     im2col(temp, kernel_h, kernel_w, pad_h, pad_w, stride_h, stride_w, dilation_h, dilation_w, col);
- // }
-
// TODO: More elegant way
static char buf[2400000];

@@ -85,7 +49,6 @@ inline void fused_binarize_im2col(const Mat &im, const int kernel_h,
// len: the number of elements in one column
const size_t len = (buf_ptr - buf) / im.elemsize;
const size_t len_aligned_128 = (len + 127) / 128 * 128;
- // BNN_ASSERT(len == len_aligned_128, "");
// pad the buffer so that its length aligns to 128
memset(buf_ptr, 0, (len_aligned_128 - len) * im.elemsize);

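For context on the code that survives this deletion: fused_binarize_im2col binarizes input pixels straight into the static scratch buffer above and then zero-pads each column so its element count is a multiple of 128, letting the downstream bit-level consumers work in fixed 128-element chunks. A minimal standalone sketch of that round-up arithmetic (illustrative names and buffer size, not dabnn's code):

#include <cstddef>
#include <cstdio>
#include <cstring>

// Round a length up to the next multiple of 128, matching the
// (len + 127) / 128 * 128 expression kept in fused_binarize_im2col.
static size_t align_up_128(size_t len) { return (len + 127) / 128 * 128; }

int main() {
    char buf[512] = {0};
    size_t len = 300;                   // elements actually written
    size_t padded = align_up_128(len);  // 384, the next multiple of 128
    memset(buf + len, 0, padded - len); // zero the 84-byte tail (elemsize 1)
    printf("%zu -> %zu\n", len, padded);
    return 0;
}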
10 changes: 0 additions & 10 deletions dabnn/layers/BinConv.cpp
@@ -120,16 +120,6 @@ void BinConv::forward_impl() const {
bconv_3x3(*padded_mat, *weight_mat, *output_mat, stride_h);
} else if (gemm_compatible()) {
output_mat->fill<float>(0.f);
- // pack_mat_64(*input_mat, *binarized_mat);
- // bnn::im2col(*binarized_mat, weight_mat->h, weight_mat->w,
- //             pad_h, pad_w, stride_h, stride_w, 1,
- //             1, *col_mat);
-
- // const auto len = output_mat->h * output_mat->w * weight_mat->h *
- //                  weight_mat->w * input_mat->elem_c;
- // Mat temp(1, 1, len, bnn::DataType::Float);
- // im2col(*input_mat, weight_mat->h, weight_mat->w, pad_h, pad_w,
- //        stride_h, stride_w, 1, 1, temp); pack_mat(temp, *col_mat);

bnn::fused_binarize_im2col(*input_mat, weight_mat->h, weight_mat->w,
pad_h, pad_w, stride_h, stride_w, 1, 1,
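The comments deleted above kept two superseded two-pass pipelines around (bit-pack then im2col, or float im2col followed by pack_mat); the fused_binarize_im2col call below them now does both steps in a single pass. The core of the packing step is collapsing the signs of float activations into bits. A hedged sketch with an illustrative helper name; dabnn's pack_mat / pack_mat_64 may use a different bit order or sign convention:

#include <cstdint>

// Illustrative only: pack 64 floats into one uint64_t, one bit per value
// (here the bit is set when the value is negative).
uint64_t pack_signs_64(const float *x) {
    uint64_t bits = 0;
    for (int i = 0; i < 64; ++i) {
        if (x[i] < 0.f) bits |= uint64_t{1} << i;
    }
    return bits;
}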
4 changes: 0 additions & 4 deletions dabnn/net.cpp
@@ -99,10 +99,6 @@ void Net::prepare() {
shape[0], shape[1], shape[2], shape[3],
bnn::DataType::Bit, len, false));
pack_mat(*tmp, *mat_map_[name]);
- // add_mat(name, std::make_shared<Mat>(
- //              shape[0], shape[1], shape[2], shape[3],
- //              const_cast<uint64_t *>(data),
- //              bnn::DataType::Bit, false));
} else {
#endif // __aarch64__
add_mat(name, std::make_shared<Mat>(
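The alternative deleted here would have aliased the raw uint64_t weight data as a Bit mat with no copy; the surviving path instead allocates a fresh Bit mat and fills it with pack_mat at prepare time, paying one copy to own the packed weights. A conceptual sketch of such prepare-time packing (hypothetical function, not dabnn's API):

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical: binarize n serialized float weights once at load time
// and keep only the owned, bit-packed form.
std::vector<uint64_t> prepare_bit_weights(const float *data, size_t n) {
    std::vector<uint64_t> packed((n + 63) / 64, 0);
    for (size_t i = 0; i < n; ++i) {
        if (data[i] < 0.f) packed[i / 64] |= uint64_t{1} << (i % 64);
    }
    return packed;  // the float source is no longer needed after this
}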
