diff --git a/example/image-classification/symbol_alexnet.R b/example/image-classification/symbol_alexnet.R
new file mode 100644
index 000000000000..ec768c9adb14
--- /dev/null
+++ b/example/image-classification/symbol_alexnet.R
@@ -0,0 +1,36 @@
+library(mxnet)
+
+get_symbol <- function(num_classes = 1000) {
+  input_data <- mx.symbol.Variable(name = "data")
+  # stage 1
+  conv1 <- mx.symbol.Convolution(data = input_data, kernel = c(11, 11), stride = c(4, 4), num_filter = 96)
+  relu1 <- mx.symbol.Activation(data = conv1, act_type = "relu")
+  pool1 <- mx.symbol.Pooling(data = relu1, pool_type = "max", kernel = c(3, 3), stride = c(2, 2))
+  lrn1 <- mx.symbol.LRN(data = pool1, alpha = 0.0001, beta = 0.75, knorm = 1, nsize = 5)
+  # stage 2
+  conv2 <- mx.symbol.Convolution(data = lrn1, kernel = c(5, 5), pad = c(2, 2), num_filter = 256)
+  relu2 <- mx.symbol.Activation(data = conv2, act_type = "relu")
+  pool2 <- mx.symbol.Pooling(data = relu2, kernel = c(3, 3), stride = c(2, 2), pool_type = "max")
+  lrn2 <- mx.symbol.LRN(data = pool2, alpha = 0.0001, beta = 0.75, knorm = 1, nsize = 5)
+  # stage 3
+  conv3 <- mx.symbol.Convolution(data = lrn2, kernel = c(3, 3), pad = c(1, 1), num_filter = 384)
+  relu3 <- mx.symbol.Activation(data = conv3, act_type = "relu")
+  conv4 <- mx.symbol.Convolution(data = relu3, kernel = c(3, 3), pad = c(1, 1), num_filter = 384)
+  relu4 <- mx.symbol.Activation(data = conv4, act_type = "relu")
+  conv5 <- mx.symbol.Convolution(data = relu4, kernel = c(3, 3), pad = c(1, 1), num_filter = 256)
+  relu5 <- mx.symbol.Activation(data = conv5, act_type = "relu")
+  pool3 <- mx.symbol.Pooling(data = relu5, kernel = c(3, 3), stride = c(2, 2), pool_type = "max")
+  # stage 4
+  flatten <- mx.symbol.Flatten(data = pool3)
+  fc1 <- mx.symbol.FullyConnected(data = flatten, num_hidden = 4096)
+  relu6 <- mx.symbol.Activation(data = fc1, act_type = "relu")
+  dropout1 <- mx.symbol.Dropout(data = relu6, p = 0.5)
+  # stage 5
+  fc2 <- mx.symbol.FullyConnected(data = dropout1, num_hidden = 4096)
+  relu7 <- mx.symbol.Activation(data = fc2, act_type = "relu")
+  dropout2 <- mx.symbol.Dropout(data = relu7, p = 0.5)
+  # stage 6
+  fc3 <- mx.symbol.FullyConnected(data = dropout2, num_hidden = num_classes)
+  softmax <- mx.symbol.SoftmaxOutput(data = fc3, name = 'softmax')
+  return(softmax)
+}
diff --git a/example/image-classification/symbol_googlenet.R b/example/image-classification/symbol_googlenet.R
new file mode 100644
index 000000000000..8e7ac7224d55
--- /dev/null
+++ b/example/image-classification/symbol_googlenet.R
@@ -0,0 +1,67 @@
+library(mxnet)
+
+ConvFactory <- function(data, num_filter, kernel, stride = c(1, 1), pad = c(0, 0),
+                        name = '', suffix = '') {
+  conv <- mx.symbol.Convolution(data = data, num_filter = num_filter, kernel = kernel, stride = stride,
+                                pad = pad, name = paste('conv_', name, suffix, sep = ""))
+  act <- mx.symbol.Activation(data = conv, act_type = 'relu', name = paste('relu_', name, suffix, sep = ''))
+  return(act)
+}
+
+InceptionFactory <- function(data, num_1x1, num_3x3red, num_3x3,
+                             num_d5x5red, num_d5x5, pool, proj, name) {
+  # 1x1
+  c1x1 <- ConvFactory(data = data, num_filter = num_1x1, kernel = c(1, 1),
+                      name = paste(name, '_1x1', sep = ''))
+  # 3x3 reduce + 3x3
+  c3x3r = ConvFactory(data = data, num_filter = num_3x3red, kernel = c(1, 1),
+                      name = paste(name, '_3x3', sep = ''), suffix = '_reduce')
+  c3x3 = ConvFactory(data = c3x3r, num_filter = num_3x3, kernel = c(3, 3),
+                     pad = c(1, 1), name = paste(name, '_3x3', sep = ''))
+  # 5x5 reduce + 5x5
+  cd5x5r = ConvFactory(data = data, num_filter = num_d5x5red, kernel = c(1, 1),
+                       name = paste(name, '_5x5', sep = ''), suffix = '_reduce')
+  cd5x5 = ConvFactory(data = cd5x5r, num_filter = num_d5x5, kernel = c(5, 5), pad = c(2, 2),
+                      name = paste(name, '_5x5', sep = ''))
+  # pool + proj
+  pooling = mx.symbol.Pooling(data = data, kernel = c(3, 3), stride = c(1, 1),
+                              pad = c(1, 1), pool_type = pool,
+                              name = paste(pool, '_pool_', name, '_pool', sep = ''))
+
+  cproj = ConvFactory(data = pooling, num_filter = proj, kernel = c(1, 1),
+                      name = paste(name, '_proj', sep = ''))
+  # concat
+  concat_lst <- list()
+  concat_lst <- c(c1x1, c3x3, cd5x5, cproj)
+  concat_lst$num.args = 4
+  concat_lst$name = paste('ch_concat_', name, '_chconcat', sep = '')
+  concat = mxnet:::mx.varg.symbol.Concat(concat_lst)
+  return(concat)
+}
+
+
+get_symbol <- function(num_classes = 1000) {
+  data <- mx.symbol.Variable("data")
+  conv1 <- ConvFactory(data, 64, kernel = c(7, 7), stride = c(2, 2), pad = c(3, 3), name = "conv1")
+  pool1 <- mx.symbol.Pooling(conv1, kernel = c(3, 3), stride = c(2, 2), pool_type = "max")
+  conv2 <- ConvFactory(pool1, 64, kernel = c(1, 1), stride = c(1, 1), name = "conv2")
+  conv3 <- ConvFactory(conv2, 192, kernel = c(3, 3), stride = c(1, 1), pad = c(1, 1), name = "conv3")
+  pool3 <- mx.symbol.Pooling(conv3, kernel = c(3, 3), stride = c(2, 2), pool_type = "max")
+
+  in3a <- InceptionFactory(pool3, 64, 96, 128, 16, 32, "max", 32, name = "in3a")
+  in3b <- InceptionFactory(in3a, 128, 128, 192, 32, 96, "max", 64, name = "in3b")
+  pool4 <- mx.symbol.Pooling(in3b, kernel = c(3, 3), stride = c(2, 2), pool_type = "max")
+  in4a <- InceptionFactory(pool4, 192, 96, 208, 16, 48, "max", 64, name = "in4a")
+  in4b <- InceptionFactory(in4a, 160, 112, 224, 24, 64, "max", 64, name = "in4b")
+  in4c <- InceptionFactory(in4b, 128, 128, 256, 24, 64, "max", 64, name = "in4c")
+  in4d <- InceptionFactory(in4c, 112, 144, 288, 32, 64, "max", 64, name = "in4d")
+  in4e <- InceptionFactory(in4d, 256, 160, 320, 32, 128, "max", 128, name = "in4e")
+  pool5 <- mx.symbol.Pooling(in4e, kernel = c(3, 3), stride = c(2, 2), pool_type = "max")
+  in5a <- InceptionFactory(pool5, 256, 160, 320, 32, 128, "max", 128, name = "in5a")
+  in5b <- InceptionFactory(in5a, 384, 192, 384, 48, 128, "max", 128, name = "in5b")
+  pool6 <- mx.symbol.Pooling(in5b, kernel = c(7, 7), stride = c(1, 1), pool_type = "avg")
+  flatten <- mx.symbol.Flatten(data = pool6, name = 'flatten0')
+  fc1 <- mx.symbol.FullyConnected(data = flatten, num_hidden = num_classes)
+  softmax <- mx.symbol.SoftmaxOutput(data = fc1, name = 'softmax')
+  return(softmax)
+}
diff --git a/example/image-classification/symbol_inception-bn.R b/example/image-classification/symbol_inception-bn.R
new file mode 100644
index 000000000000..59d871e08f4a
--- /dev/null
+++ b/example/image-classification/symbol_inception-bn.R
@@ -0,0 +1,113 @@
+library(mxnet)
+
+ConvFactory <- function(data, num_filter, kernel, stride = c(1, 1),
+                        pad = c(0, 0), name = '', suffix = '') {
+  conv <- mx.symbol.Convolution(data = data, num_filter = num_filter,
+                                kernel = kernel, stride = stride, pad = pad,
+                                name = paste('conv_', name, suffix, sep = ''))
+
+  bn <- mx.symbol.BatchNorm(data = conv, name = paste('bn_', name, suffix, sep = ''))
+  act <- mx.symbol.Activation(data = bn, act_type = 'relu', name = paste('relu_', name, suffix, sep = ''))
+  return(act)
+}
+
+InceptionFactoryA <- function(data, num_1x1, num_3x3red, num_3x3, num_d3x3red,
+                              num_d3x3, pool, proj, name) {
+  # 1x1
+  c1x1 <- ConvFactory(data = data, num_filter = num_1x1, kernel = c(1, 1), name = paste(name, '_1x1', sep = '')
+  )
+  # 3x3 reduce + 3x3
+  c3x3r <- ConvFactory(data = data, num_filter = num_3x3red, kernel = c(1, 1),
+                       name = paste(name, '_3x3', sep = ''), suffix = '_reduce')
+
+  c3x3 <- ConvFactory(data = c3x3r, num_filter = num_3x3, kernel = c(3, 3),
+                      pad = c(1, 1), name = paste(name, '_3x3', sep = ''))
+  # double 3x3 reduce + double 3x3
+  cd3x3r <- ConvFactory(data = data, num_filter = num_d3x3red, kernel = c(1, 1),
+                        name = paste(name, '_double_3x3', sep = ''), suffix = '_reduce')
+
+  cd3x3 <- ConvFactory(data = cd3x3r, num_filter = num_d3x3, kernel = c(3, 3),
+                       pad = c(1, 1), name = paste(name, '_double_3x3_0', sep = ''))
+
+  cd3x3 <- ConvFactory(data = cd3x3, num_filter = num_d3x3, kernel = c(3, 3),
+                       pad = c(1, 1), name = paste(name, '_double_3x3_1', sep = ''))
+  # pool + proj
+  pooling <- mx.symbol.Pooling(data = data, kernel = c(3, 3), stride = c(1, 1),
+                               pad = c(1, 1), pool_type = pool,
+                               name = paste(pool, '_pool_', name, '_pool', sep = ''))
+  cproj <- ConvFactory(data = pooling, num_filter = proj, kernel = c(1, 1),
+                       name = paste(name, '_proj', sep = ''))
+  # concat
+  concat_lst <- list()
+  concat_lst <- c(c1x1, c3x3, cd3x3, cproj)
+  concat_lst$num.args = 4
+  concat_lst$name = paste('ch_concat_', name, '_chconcat', sep = '')
+  concat = mxnet:::mx.varg.symbol.Concat(concat_lst)
+  return(concat)
+}
+
+InceptionFactoryB <- function(data, num_3x3red, num_3x3, num_d3x3red, num_d3x3, name) {
+  # 3x3 reduce + 3x3
+  c3x3r <- ConvFactory(data = data, num_filter = num_3x3red, kernel = c(1, 1),
+                       name = paste(name, '_3x3', sep = ''), suffix = '_reduce')
+  c3x3 <- ConvFactory(data = c3x3r, num_filter = num_3x3, kernel = c(3, 3),
+                      pad = c(1, 1), stride = c(2, 2), name = paste(name, '_3x3', sep = ''))
+  # double 3x3 reduce + double 3x3
+  cd3x3r <- ConvFactory(data = data, num_filter = num_d3x3red, kernel = c(1, 1),
+                        name = paste(name, '_double_3x3', sep = ''), suffix = '_reduce')
+  cd3x3 <- ConvFactory(data = cd3x3r, num_filter = num_d3x3, kernel = c(3, 3),
+                       pad = c(1, 1), stride = c(1, 1), name = paste(name, '_double_3x3_0', sep = ''))
+  cd3x3 = ConvFactory(data = cd3x3, num_filter = num_d3x3, kernel = c(3, 3),
+                      pad = c(1, 1), stride = c(2, 2), name = paste(name, '_double_3x3_1', sep = ''))
+  # pool + proj
+  pooling = mx.symbol.Pooling(data = data, kernel = c(3, 3), stride = c(2, 2),
+                              pad = c(1, 1), pool_type = "max",
+                              name = paste('max_pool_', name, '_pool', sep = ''))
+  # concat
+  concat_lst <- list()
+  concat_lst <- c(c3x3, cd3x3, pooling)
+  concat_lst$num.args = 3
+  concat_lst$name = paste('ch_concat_', name, '_chconcat', sep = '')
+  concat = mxnet:::mx.varg.symbol.Concat(concat_lst)
+  return(concat)
+}
+
+get_symbol <- function(num_classes = 1000) {
+  # data
+  data = mx.symbol.Variable(name = "data")
+  # stage 1
+  conv1 = ConvFactory(data = data, num_filter = 64, kernel = c(7, 7),
+                      stride = c(2, 2), pad = c(3, 3), name = 'conv1')
+  pool1 = mx.symbol.Pooling(data = conv1, kernel = c(3, 3), stride = c(2, 2),
+                            name = 'pool1', pool_type = 'max')
+  # stage 2
+  conv2red = ConvFactory(data = pool1, num_filter = 64, kernel = c(1, 1),
+                         stride = c(1, 1), name = 'conv2red')
+  conv2 = ConvFactory(data = conv2red, num_filter = 192, kernel = c(3, 3),
+                      stride = c(1, 1), pad = c(1, 1), name = 'conv2')
+  pool2 = mx.symbol.Pooling(data = conv2, kernel = c(3, 3), stride = c(2, 2),
+                            name = 'pool2', pool_type = 'max')
+  # stage 3
+  in3a = InceptionFactoryA(pool2, 64, 64, 64, 64, 96, "avg", 32, '3a')
+  in3b = InceptionFactoryA(in3a, 64, 64, 96, 64, 96, "avg", 64, '3b')
+  in3c = InceptionFactoryB(in3b, 128, 160, 64, 96, '3c')
+  # stage 4
+  in4a = InceptionFactoryA(in3c, 224, 64, 96, 96, 128, "avg", 128, '4a')
+  in4b = InceptionFactoryA(in4a, 192, 96, 128, 96, 128, "avg", 128, '4b')
+  in4c = InceptionFactoryA(in4b, 160, 128, 160, 128, 160, "avg", 128, '4c')
+  in4d = InceptionFactoryA(in4c, 96, 128, 192, 160, 192, "avg", 128, '4d')
+  in4e = InceptionFactoryB(in4d, 128, 192, 192, 256, '4e')
+  # stage 5
+  in5a = InceptionFactoryA(in4e, 352, 192, 320, 160, 224, "avg", 128, '5a')
+  in5b = InceptionFactoryA(in5a, 352, 192, 320, 192, 224, "max", 128, '5b')
+  # global avg pooling
+  avg = mx.symbol.Pooling(data = in5b, kernel = c(7, 7), stride = c(1, 1),
+                          name = "global_pool", pool_type = 'avg')
+  # linear classifier
+  flatten = mx.symbol.Flatten(data = avg, name = 'flatten')
+  fc1 = mx.symbol.FullyConnected(data = flatten,
+                                 num_hidden = num_classes,
+                                 name = 'fc1')
+  softmax = mx.symbol.SoftmaxOutput(data = fc1, name = 'softmax')
+  return(softmax)
+}
diff --git a/example/image-classification/symbol_lenet.R b/example/image-classification/symbol_lenet.R
new file mode 100644
index 000000000000..b9991912bf52
--- /dev/null
+++ b/example/image-classification/symbol_lenet.R
@@ -0,0 +1,24 @@
+library(mxnet)
+
+get_symbol <- function(num_classes = 1000) {
+  data <- mx.symbol.Variable('data')
+  # first conv
+  conv1 <- mx.symbol.Convolution(data = data, kernel = c(5, 5), num_filter = 20)
+
+  tanh1 <- mx.symbol.Activation(data = conv1, act_type = "tanh")
+  pool1 <- mx.symbol.Pooling(data = tanh1, pool_type = "max", kernel = c(2, 2), stride = c(2, 2))
+
+  # second conv
+  conv2 <- mx.symbol.Convolution(data = pool1, kernel = c(5, 5), num_filter = 50)
+  tanh2 <- mx.symbol.Activation(data = conv2, act_type = "tanh")
+  pool2 <- mx.symbol.Pooling(data = tanh2, pool_type = "max", kernel = c(2, 2), stride = c(2, 2))
+  # first fullc
+  flatten <- mx.symbol.Flatten(data = pool2)
+  fc1 <- mx.symbol.FullyConnected(data = flatten, num_hidden = 500)
+  tanh3 <- mx.symbol.Activation(data = fc1, act_type = "tanh")
+  # second fullc
+  fc2 <- mx.symbol.FullyConnected(data = tanh3, num_hidden = num_classes)
+  # loss
+  lenet <- mx.symbol.SoftmaxOutput(data = fc2, name = 'softmax')
+  return(lenet)
+}
diff --git a/example/image-classification/symbol_mlp.R b/example/image-classification/symbol_mlp.R
new file mode 100644
index 000000000000..55aaf1f776ff
--- /dev/null
+++ b/example/image-classification/symbol_mlp.R
@@ -0,0 +1,12 @@
+library(mxnet)
+
+get_symbol <- function(num_classes = 1000) {
+  data <- mx.symbol.Variable('data')
+  fc1 <- mx.symbol.FullyConnected(data = data, name = 'fc1', num_hidden = 128)
+  act1 <- mx.symbol.Activation(data = fc1, name = 'relu1', act_type = "relu")
+  fc2 <- mx.symbol.FullyConnected(data = act1, name = 'fc2', num_hidden = 64)
+  act2 <- mx.symbol.Activation(data = fc2, name = 'relu2', act_type = "relu")
+  fc3 <- mx.symbol.FullyConnected(data = act2, name = 'fc3', num_hidden = num_classes)
+  mlp <- mx.symbol.SoftmaxOutput(data = fc3, name = 'softmax')
+  return(mlp)
+}
diff --git a/example/image-classification/symbol_resnet-28-small.R b/example/image-classification/symbol_resnet-28-small.R
new file mode 100644
index 000000000000..4ef9e950059d
--- /dev/null
+++ b/example/image-classification/symbol_resnet-28-small.R
@@ -0,0 +1,82 @@
+library(mxnet)
+
+conv_factory <- function(data, num_filter, kernel, stride,
+                         pad, act_type = 'relu', conv_type = 0) {
+  if (conv_type == 0) {
+    conv = mx.symbol.Convolution(data = data, num_filter = num_filter,
+                                 kernel = kernel, stride = stride, pad = pad)
+    bn = mx.symbol.BatchNorm(data = conv)
+    act = mx.symbol.Activation(data = bn, act_type = act_type)
+    return(act)
+  } else if (conv_type == 1) {
+    conv = mx.symbol.Convolution(data = data, num_filter = num_filter,
+                                 kernel = kernel, stride = stride, pad = pad)
+    bn = mx.symbol.BatchNorm(data = conv)
+    return(bn)
+  }
+}
+
+residual_factory <- function(data, num_filter, dim_match) {
+  if (dim_match) {
+    identity_data = data
+    conv1 = conv_factory(data = data, num_filter = num_filter, kernel = c(3, 3),
+                         stride = c(1, 1), pad = c(1, 1), act_type = 'relu', conv_type = 0)
+
+    conv2 = conv_factory(data = conv1, num_filter = num_filter, kernel = c(3, 3),
+                         stride = c(1, 1), pad = c(1, 1), conv_type = 1)
+    new_data = identity_data + conv2
+    act = mx.symbol.Activation(data = new_data, act_type = 'relu')
+    return(act)
+  } else {
+    conv1 = conv_factory(data = data, num_filter = num_filter, kernel = c(3, 3),
+                         stride = c(2, 2), pad = c(1, 1), act_type = 'relu', conv_type = 0)
+    conv2 = conv_factory(data = conv1, num_filter = num_filter, kernel = c(3, 3),
+                         stride = c(1, 1), pad = c(1, 1), conv_type = 1)
+
+    # adopt project method in the paper when dimension increased
+    project_data = conv_factory(data = data, num_filter = num_filter, kernel = c(1, 1),
+                                stride = c(2, 2), pad = c(0, 0), conv_type = 1)
+    new_data = project_data + conv2
+    act = mx.symbol.Activation(data = new_data, act_type = 'relu')
+    return(act)
+  }
+}
+
+residual_net <- function(data, n) {
+  # first 2n layers
+  for (i in seq_len(n)) {
+    data = residual_factory(data = data, num_filter = 16, dim_match = TRUE)
+  }
+
+
+  # second 2n layers
+  for (i in seq_len(n)) {
+    if (i == 1) {
+      data = residual_factory(data = data, num_filter = 32, dim_match = FALSE)
+    } else {
+      data = residual_factory(data = data, num_filter = 32, dim_match = TRUE)
+    }
+  }
+  # third 2n layers
+  for (i in seq_len(n)) {
+    if (i == 1) {
+      data = residual_factory(data = data, num_filter = 64, dim_match = FALSE)
+    } else {
+      data = residual_factory(data = data, num_filter = 64, dim_match = TRUE)
+    }
+  }
+  return(data)
+}
+
+get_symbol <- function(num_classes = 10) {
+  conv <- conv_factory(data = mx.symbol.Variable(name = 'data'), num_filter = 16,
+                       kernel = c(3, 3), stride = c(1, 1), pad = c(1, 1),
+                       act_type = 'relu', conv_type = 0)
+  n <- 3 # set n = 3 means get a model with 3*6+2=20 layers, set n = 9 means 9*6+2=56 layers
+  resnet <- residual_net(conv, n)
+  pool <- mx.symbol.Pooling(data = resnet, kernel = c(7, 7), pool_type = 'avg')
+  flatten <- mx.symbol.Flatten(data = pool, name = 'flatten')
+  fc <- mx.symbol.FullyConnected(data = flatten, num_hidden = num_classes, name = 'fc1')
+  softmax <- mx.symbol.SoftmaxOutput(data = fc, name = 'softmax')
+  return(softmax)
+}
diff --git a/example/image-classification/symbol_resnet.R b/example/image-classification/symbol_resnet.R
new file mode 100644
index 000000000000..224fa91c8b25
--- /dev/null
+++ b/example/image-classification/symbol_resnet.R
@@ -0,0 +1,70 @@
+library(mxnet)
+
+get_conv <- function(name, data, num_filter, kernel, stride,
+                     pad, with_relu, bn_momentum) {
+  conv = mx.symbol.Convolution(name = name, data = data, num_filter = num_filter,
+                               kernel = kernel, stride = stride, pad = pad, no_bias = TRUE)
+  bn = mx.symbol.BatchNorm(name = paste(name, '_bn', sep = ''), data = conv,
+                           fix_gamma = FALSE, momentum = bn_momentum, eps = 2e-5)
+  if (with_relu) {
+    return(mx.symbol.Activation(name = paste(name, '_relu', sep = ''),
+                                data = bn, act_type = 'relu'))
+  } else {
+    return(bn)
+  }
+}
+
+make_block <- function(name, data, num_filter, dim_match, bn_momentum) {
+  if (dim_match) {
+    conv1 = get_conv(name = paste(name, '_conv1', sep = ''), data = data,
+                     num_filter = num_filter, kernel = c(3, 3), stride = c(1, 1),
+                     pad = c(1, 1), with_relu = TRUE, bn_momentum = bn_momentum)
+  } else {
+    conv1 = get_conv(name = paste(name, '_conv1', sep = ''), data = data,
+                     num_filter = num_filter, kernel = c(3, 3), stride = c(2, 2),
+                     pad = c(1, 1), with_relu = TRUE, bn_momentum = bn_momentum)
+  }
+
+  conv2 = get_conv(name = paste(name, '_conv2', sep = ''), data = conv1,
+                   num_filter = num_filter, kernel = c(3, 3), stride = c(1, 1),
+                   pad = c(1, 1), with_relu = FALSE, bn_momentum = bn_momentum)
+  if (dim_match) {
+    shortcut = data
+  } else {
+    shortcut = mx.symbol.Convolution(name = paste(name, '_proj', sep = ''),
+                                     data = data, num_filter = num_filter, kernel = c(2, 2), # NOTE(review): 1x1 projection is more common -- confirm 2x2 is intended
+                                     stride = c(2, 2), pad = c(0, 0), no_bias = TRUE)
+  }
+  fused = shortcut + conv2
+  return(mx.symbol.Activation(name = paste(name, '_relu', sep = ''), data = fused, act_type = 'relu'))
+}
+
+get_body <- function(data, num_level, num_block, num_filter, bn_momentum) {
+  for (level in seq_len(num_level)) {
+    for (block in seq_len(num_block)) {
+      data = make_block(
+        name = paste('level', level, '_block', block, sep = ''),
+        data = data,
+        num_filter = num_filter * 2 ^ (level - 1),
+        dim_match = (level == 1 || block > 1),
+        bn_momentum = bn_momentum
+      )
+    }
+  }
+  return(data)
+}
+
+get_symbol <- function(num_class, num_level = 3, num_block = 9,
+                       num_filter = 16, bn_momentum = 0.9, pool_kernel = c(8, 8)) {
+  data = mx.symbol.Variable(name = 'data')
+  zscore = mx.symbol.BatchNorm(name = 'zscore', data = data,
+                               fix_gamma = TRUE, momentum = bn_momentum)
+  conv = get_conv(name = 'conv0', data = zscore, num_filter = num_filter,
+                  kernel = c(3, 3), stride = c(1, 1), pad = c(1, 1),
+                  with_relu = TRUE, bn_momentum = bn_momentum)
+  body = get_body(conv, num_level, num_block, num_filter, bn_momentum)
+  pool = mx.symbol.Pooling(data = body, kernel = pool_kernel, pool_type = 'avg')
+  flat = mx.symbol.Flatten(data = pool)
+  fc = mx.symbol.FullyConnected(data = flat, num_hidden = num_class, name = 'fc')
+  return(mx.symbol.SoftmaxOutput(data = fc, name = 'softmax'))
+}
diff --git a/example/image-classification/symbol_unet.R b/example/image-classification/symbol_unet.R
new file mode 100644
index 000000000000..e15b48a4a005
--- /dev/null
+++ b/example/image-classification/symbol_unet.R
@@ -0,0 +1,81 @@
+library(mxnet)
+
+convolution_module <- function(net, kernel_size, pad_size,
+                               filter_count, stride = c(1, 1), work_space = 2048,
+                               batch_norm = TRUE, down_pool = FALSE, up_pool = FALSE,
+                               act_type = "relu", convolution = TRUE) {
+  if (up_pool) {
+    net = mx.symbol.Deconvolution(net, kernel = c(2, 2), pad = c(0, 0),
+                                  stride = c(2, 2), num_filter = filter_count, workspace = work_space)
+    net = mx.symbol.BatchNorm(net)
+    if (act_type != "") {
+      net = mx.symbol.Activation(net, act_type = act_type)
+    }
+  }
+  if (convolution) {
+    conv = mx.symbol.Convolution(data = net, kernel = kernel_size, stride = stride,
+                                 pad = pad_size, num_filter = filter_count, workspace = work_space)
+    net = conv
+  }
+
+  if (batch_norm) {
+    net = mx.symbol.BatchNorm(net)
+  }
+
+  if (act_type != "") {
+    net = mx.symbol.Activation(net, act_type = act_type)
+  }
+
+  if (down_pool) {
+    pool = mx.symbol.Pooling(net, pool_type = "max", kernel = c(2, 2), stride = c(2, 2))
+    net = pool
+  }
+  return(net)
+}
+
+get_symbol <- function(num_classes = 10) {
+  data = mx.symbol.Variable('data')
+  kernel_size = c(3, 3)
+  pad_size = c(1, 1)
+  filter_count = 32
+  pool1 = convolution_module(data, kernel_size, pad_size, filter_count = filter_count, down_pool = TRUE)
+  net = pool1
+  pool2 = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 2, down_pool = TRUE)
+  net = pool2
+  pool3 = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 4, down_pool = TRUE)
+  net = pool3
+  pool4 = convolution_module(net,
+                             kernel_size,
+                             pad_size,
+                             filter_count = filter_count * 4,
+                             down_pool = TRUE)
+  net = pool4
+  net = mx.symbol.Dropout(net)
+  pool5 = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 8, down_pool = TRUE)
+  net = pool5
+  net = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 4, up_pool = TRUE)
+  net = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 4, up_pool = TRUE)
+
+  # dirty "CROP" to wanted size... I was on old MxNet branch so used conv instead of crop for cropping
+  net = convolution_module(net, c(4, 4), c(0, 0), filter_count = filter_count * 4)
+
+  net = mx.symbol.Concat(c(pool3, net), num.args = 2)
+  net = mx.symbol.Dropout(net)
+  net = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 4)
+  net = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 4, up_pool = TRUE)
+
+  net = mx.symbol.Concat(c(pool2, net), num.args = 2)
+  net = mx.symbol.Dropout(net)
+  net = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 4)
+  net = convolution_module(net, kernel_size, pad_size,
+                           filter_count = filter_count * 4, up_pool = TRUE)
+  # NOTE(review): removed dead statement `convolution_module(net, ...)` whose result was never assigned
+  net = mx.symbol.Concat(c(pool1, net), num.args = 2)
+  net = mx.symbol.Dropout(net)
+  net = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 2)
+  net = convolution_module(net, kernel_size, pad_size, filter_count = filter_count * 2, up_pool = TRUE)
+  net = mx.symbol.Flatten(net)
+  net = mx.symbol.FullyConnected(data = net, num_hidden = num_classes)
+  net = mx.symbol.SoftmaxOutput(data = net, name = 'softmax')
+  return(net)
+}
diff --git a/example/image-classification/symbol_vgg.R b/example/image-classification/symbol_vgg.R
new file mode 100644
index 000000000000..4ebd1017a3f6
--- /dev/null
+++ b/example/image-classification/symbol_vgg.R
@@ -0,0 +1,58 @@
+library(mxnet)
+
+get_symbol <- function(num_classes = 1000) {
+  ## define vgg
+  data = mx.symbol.Variable(name = "data")
+  # group 1
+  conv1_1 = mx.symbol.Convolution(data = data, kernel = c(3, 3), pad = c(1, 1),
+                                  num_filter = 64, name = "conv1_1")
+  relu1_1 = mx.symbol.Activation(data = conv1_1, act_type = "relu", name = "relu1_1")
+  pool1 = mx.symbol.Pooling(data = relu1_1, pool_type = "max", kernel = c(2, 2),
+                            stride = c(2, 2), name = "pool1")
+  # group 2
+  conv2_1 = mx.symbol.Convolution(data = pool1, kernel = c(3, 3), pad = c(1, 1),
+                                  num_filter = 128, name = "conv2_1")
+  relu2_1 = mx.symbol.Activation(data = conv2_1, act_type = "relu", name = "relu2_1")
+  pool2 = mx.symbol.Pooling(data = relu2_1, pool_type = "max", kernel = c(2, 2),
+                            stride = c(2, 2), name = "pool2")
+  # group 3
+  conv3_1 = mx.symbol.Convolution(data = pool2, kernel = c(3, 3), pad = c(1, 1),
+                                  num_filter = 256, name = "conv3_1")
+  relu3_1 = mx.symbol.Activation(data = conv3_1, act_type = "relu", name = "relu3_1")
+  conv3_2 = mx.symbol.Convolution(data = relu3_1, kernel = c(3, 3), pad = c(1, 1),
+                                  num_filter = 256, name = "conv3_2")
+  relu3_2 = mx.symbol.Activation(data = conv3_2, act_type = "relu", name = "relu3_2")
+  pool3 = mx.symbol.Pooling(data = relu3_2, pool_type = "max", kernel = c(2, 2),
+                            stride = c(2, 2), name = "pool3")
+  # group 4
+  conv4_1 = mx.symbol.Convolution(data = pool3, kernel = c(3, 3), pad = c(1, 1),
+                                  num_filter = 512, name = "conv4_1")
+  relu4_1 = mx.symbol.Activation(data = conv4_1, act_type = "relu", name = "relu4_1")
+  conv4_2 = mx.symbol.Convolution(data = relu4_1, kernel = c(3, 3), pad = c(1, 1),
+                                  num_filter = 512, name = "conv4_2")
+  relu4_2 = mx.symbol.Activation(data = conv4_2, act_type = "relu", name = "relu4_2")
+  pool4 = mx.symbol.Pooling(data = relu4_2, pool_type = "max",
+                            kernel = c(2, 2), stride = c(2, 2), name = "pool4")
+  # group 5
+  conv5_1 = mx.symbol.Convolution(data = pool4, kernel = c(3, 3),
+                                  pad = c(1, 1), num_filter = 512, name = "conv5_1")
+  relu5_1 = mx.symbol.Activation(data = conv5_1, act_type = "relu", name = "relu5_1")
+  conv5_2 = mx.symbol.Convolution(data = relu5_1, kernel = c(3, 3),
+                                  pad = c(1, 1), num_filter = 512, name = "conv5_2")
+  relu5_2 = mx.symbol.Activation(data = conv5_2, act_type = "relu", name = "relu5_2")
+  pool5 = mx.symbol.Pooling(data = relu5_2, pool_type = "max",
+                            kernel = c(2, 2), stride = c(2, 2), name = "pool5")
+  # group 6
+  flatten = mx.symbol.Flatten(data = pool5, name = "flatten")
+  fc6 = mx.symbol.FullyConnected(data = flatten, num_hidden = 4096, name = "fc6")
+  relu6 = mx.symbol.Activation(data = fc6, act_type = "relu", name = "relu6")
+  drop6 = mx.symbol.Dropout(data = relu6, p = 0.5, name = "drop6")
+  # group 7
+  fc7 = mx.symbol.FullyConnected(data = drop6, num_hidden = 4096, name = "fc7")
+  relu7 = mx.symbol.Activation(data = fc7, act_type = "relu", name = "relu7")
+  drop7 = mx.symbol.Dropout(data = relu7, p = 0.5, name = "drop7")
+  # output
+  fc8 = mx.symbol.FullyConnected(data = drop7, num_hidden = num_classes, name = "fc8")
+  softmax = mx.symbol.SoftmaxOutput(data = fc8, name = 'softmax')
+  return(softmax)
+}