Learn MXNet with Real Code Examples
Updated Nov 24, 2025
Code Sample Descriptions
1
MXNet Simple Linear Regression
import mxnet as mx
from mxnet import nd, autograd, gluon
# Sample data
x_train = nd.array([1,2,3,4])
y_train = nd.array([2,4,6,8])
# Define model
net = gluon.nn.Dense(1)
net.initialize()
loss = gluon.loss.L2Loss()
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.01})
# Training loop
for epoch in range(500):
    with autograd.record():
        y_pred = net(x_train.reshape((-1,1)))
        l = loss(y_pred, y_train.reshape((-1,1)))
    l.backward()
    trainer.step(batch_size=4)
# Predict
x_test = nd.array([10])
y_pred = net(x_test.reshape((-1,1)))
print('Prediction for 10:', y_pred.asscalar())
A minimal MXNet example performing linear regression on sample data using the Gluon API.
2
MXNet Simple Neural Network
import mxnet as mx
from mxnet import nd, autograd, gluon
x_train = nd.random.uniform(shape=(100,3))
y_train = nd.random.randint(0, 2, shape=(100,1)).astype('float32')  # binary labels as float for the loss
net = gluon.nn.Sequential()
with net.name_scope():
    net.add(gluon.nn.Dense(8, activation='relu'))
    net.add(gluon.nn.Dense(1, activation='sigmoid'))
net.initialize()
loss = gluon.loss.SigmoidBinaryCrossEntropyLoss(from_sigmoid=True)  # the network already applies sigmoid
trainer = gluon.Trainer(net.collect_params(), 'adam')
for epoch in range(50):
    with autograd.record():
        y_pred = net(x_train)
        l = loss(y_pred, y_train)
    l.backward()
    trainer.step(batch_size=100)  # the full 100-sample batch is used in each step
A basic feedforward neural network for binary classification using the Gluon API.
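Since the labels here are random, accuracy should hover around chance, but the following sketch shows how training accuracy could be measured, assuming the net, x_train, and y_train defined above (the 0.5 decision threshold is an assumption):
# Evaluate training accuracy by thresholding the sigmoid outputs (a sketch)
probs = net(x_train)                       # sigmoid outputs in [0, 1]
preds = (probs > 0.5).astype('float32')    # hard 0/1 predictions
accuracy = (preds == y_train).mean().asscalar()
print('Training accuracy:', accuracy)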
3
MXNet MNIST Classifier
import mxnet as mx
from mxnet.gluon import nn, data as gdata
from mxnet import autograd, gluon
mnist_train = gdata.vision.MNIST(train=True)
mnist_test = gdata.vision.MNIST(train=False)
batch_size = 64
train_data = gdata.DataLoader(mnist_train.transform_first(gdata.vision.transforms.ToTensor()), batch_size=batch_size)
test_data = gdata.DataLoader(mnist_test.transform_first(gdata.vision.transforms.ToTensor()), batch_size=batch_size)
net = nn.Sequential()
with net.name_scope():
    net.add(nn.Flatten())
    net.add(nn.Dense(128, activation='relu'))
    net.add(nn.Dense(10))
net.initialize(mx.init.Xavier())
loss = gluon.loss.SoftmaxCrossEntropyLoss()
trainer = gluon.Trainer(net.collect_params(), 'adam')
Train a simple fully-connected network on MNIST digits using the Gluon API.
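The snippet above stops after creating the trainer. A minimal training loop over train_data might look like the sketch below (one epoch shown; it assumes the net, loss, trainer, and batch_size defined above):
# One training epoch over the MNIST DataLoader (a sketch)
for epoch in range(1):
    cumulative_loss = 0.0
    for data, label in train_data:
        with autograd.record():
            output = net(data)
            l = loss(output, label)
        l.backward()
        trainer.step(batch_size)
        cumulative_loss += l.mean().asscalar()
    print('Epoch %d, average batch loss %.4f' % (epoch + 1, cumulative_loss / len(train_data)))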
4
MXNet Convolutional Neural Network
import mxnet as mx
from mxnet.gluon import nn
net = nn.Sequential()
with net.name_scope():
    net.add(nn.Conv2D(32, kernel_size=3, activation='relu'))
    net.add(nn.MaxPool2D(pool_size=2))
    net.add(nn.Flatten())
    net.add(nn.Dense(64, activation='relu'))
    net.add(nn.Dense(10))
net.initialize(mx.init.Xavier())
A simple CNN for image classification using the Gluon API.
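Because the layers above infer their input shapes lazily, pushing one batch through the network makes the shapes concrete. A quick sketch with a random NCHW batch (the 1x28x28 input size is an assumption, e.g. MNIST-style images):
from mxnet import nd
# Forward a dummy batch of 8 single-channel 28x28 images (NCHW layout)
x = nd.random.uniform(shape=(8, 1, 28, 28))
out = net(x)
print(out.shape)  # (8, 10): one score per class for each image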
5
MXNet LSTM Example
import mxnet as mx
from mxnet.gluon import nn, rnn
from mxnet import nd, autograd, gluon
x_train = nd.random.uniform(shape=(100,10,1))
y_train = nd.random.uniform(shape=(100,1))
net = nn.Sequential()
with net.name_scope():
    net.add(rnn.LSTM(50, layout='NTC'))
    net.add(nn.Dense(1))
net.initialize()
loss = gluon.loss.L2Loss()
trainer = gluon.Trainer(net.collect_params(), 'adam')
for epoch in range(20):
    with autograd.record():
        y_pred = net(x_train)
        l = loss(y_pred, y_train)
    l.backward()
    trainer.step(batch_size=100)  # full-batch update over the 100 sequences
A simple LSTM network for sequence prediction using the Gluon API.
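After training, the same network can score a new sequence. A small sketch, assuming a single sequence in the same (batch, time, feature) layout used for training:
# Predict on one new 10-step sequence (NTC layout: batch=1, time=10, features=1)
x_new = nd.random.uniform(shape=(1, 10, 1))
y_new = net(x_new)
print('Predicted value:', y_new.asscalar())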
6
MXNet Autoencoder Example
import mxnet as mx
from mxnet.gluon import nn
input_dim = 20
net = nn.Sequential()
with net.name_scope():
    net.add(nn.Dense(10, activation='relu', in_units=input_dim))  # encoder
    net.add(nn.Dense(5, activation='relu'))                       # bottleneck
    net.add(nn.Dense(10, activation='relu'))                      # decoder
    net.add(nn.Dense(input_dim, activation='sigmoid'))            # reconstruction
net.initialize(mx.init.Xavier())
A simple autoencoder for data compression using the Gluon API.
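The autoencoder above is only defined, not trained. A reconstruction-training sketch on random data, where the input also serves as the target (the data, loss, and optimizer choices are assumptions):
from mxnet import nd, autograd, gluon
# Train the autoencoder to reconstruct random 20-dimensional vectors (a sketch)
x_train = nd.random.uniform(shape=(100, input_dim))
loss = gluon.loss.L2Loss()
trainer = gluon.Trainer(net.collect_params(), 'adam')
for epoch in range(20):
    with autograd.record():
        x_rec = net(x_train)       # encode, then decode back to input_dim
        l = loss(x_rec, x_train)   # reconstruction error against the input itself
    l.backward()
    trainer.step(batch_size=100)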
7
MXNet Regression with Multiple Inputs
import mxnet as mx
from mxnet import nd, autograd, gluon
x_train = nd.random.uniform(shape=(100,3))
y_train = nd.dot(x_train, nd.array([1.5,-2.0,1.0])) + 0.5
net = gluon.nn.Dense(1)
net.initialize()
loss = gluon.loss.L2Loss()
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate':0.01})
for epoch in range(100):
    with autograd.record():
        y_pred = net(x_train)
        l = loss(y_pred, y_train.reshape((-1,1)))
    l.backward()
    trainer.step(batch_size=100)  # all 100 samples are used in each step
Linear regression with multiple features using the Gluon API.
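Because the targets were generated from the known coefficients [1.5, -2.0, 1.0] and bias 0.5, the fit can be sanity-checked by reading the learned parameters (a sketch; a single Dense block exposes them as net.weight and net.bias):
# Compare learned parameters with the generating coefficients (a sketch)
print('Learned weights:', net.weight.data().asnumpy())  # should move toward [1.5, -2.0, 1.0]
print('Learned bias:', net.bias.data().asnumpy())       # should move toward 0.5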
8
MXNet Transfer Learning Example
import mxnet as mx
from mxnet import gluon
from mxnet.gluon.model_zoo import vision
pretrained_net = vision.resnet18_v2(pretrained=True)
finetune_net = vision.resnet18_v2(classes=10)
finetune_net.features = pretrained_net.features
finetune_net.output.initialize()
finetune_net.collect_params().reset_ctx(mx.cpu())
Using a pre-trained ResNet model for image classification with the Gluon API.
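To actually fine-tune, one would typically use a small learning rate so the copied features are not disturbed too much; a sketch of the trainer setup (the learning-rate and weight-decay values are assumptions):
# Fine-tuning trainer: small learning rate so the pretrained features change slowly (a sketch)
trainer = gluon.Trainer(finetune_net.collect_params(), 'sgd',
                        {'learning_rate': 0.001, 'wd': 0.0001})
# The usual Gluon loop (autograd.record / backward / trainer.step) then applies unchanged.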
9
MXNet Custom Callback Example
import mxnet as mx
from mxnet import nd, autograd, gluon
x_train = nd.array([1,2,3,4])
y_train = nd.array([2,4,6,8])
net = gluon.nn.Dense(1)
net.initialize()
loss = gluon.loss.L2Loss()
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate':0.01})
for epoch in range(5):
    with autograd.record():
        y_pred = net(x_train.reshape((-1,1)))
        l = loss(y_pred, y_train.reshape((-1,1)))
    l.backward()
    trainer.step(batch_size=4)
    print(f'Epoch {epoch+1}, loss={nd.mean(l).asscalar()}')
Custom training loop with MXNet Gluon, printing loss each epoch.
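The loop above only prints the loss; a callback-style extension could also checkpoint the parameters whenever the loss improves. A sketch that reuses the net, loss, and trainer from above (the best_loss bookkeeping and the output filename are assumptions):
# Checkpoint-style "callback": save parameters whenever the epoch loss improves (a sketch)
best_loss = float('inf')
for epoch in range(5):
    with autograd.record():
        y_pred = net(x_train.reshape((-1,1)))
        l = loss(y_pred, y_train.reshape((-1,1)))
    l.backward()
    trainer.step(batch_size=4)
    epoch_loss = nd.mean(l).asscalar()
    if epoch_loss < best_loss:
        best_loss = epoch_loss
        net.save_parameters('best_model.params')  # hypothetical checkpoint file
        print(f'Epoch {epoch+1}: loss improved to {epoch_loss:.4f}, parameters saved')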
10
MXNet GAN Example
import mxnet as mx
from mxnet.gluon import nn
# Generator
generator = nn.Sequential()
generator.add(nn.Dense(128, activation='relu', in_units=100))
generator.add(nn.Dense(784, activation='sigmoid'))
generator.initialize(mx.init.Xavier())
# Discriminator
discriminator = nn.Sequential()
discriminator.add(nn.Dense(128, activation='relu', in_units=784))
discriminator.add(nn.Dense(1, activation='sigmoid'))
discriminator.initialize(mx.init.Xavier())
A minimal GAN structure using the Gluon API.
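The snippet only defines the two networks. One adversarial update might look like the sketch below (the noise batch, the random stand-in for real images, and the loss/optimizer choices are all assumptions):
from mxnet import nd, autograd, gluon
# One adversarial training step on a random "real" batch (a sketch)
batch_size = 32
real = nd.random.uniform(shape=(batch_size, 784))     # stand-in for real flattened images
noise = nd.random.normal(shape=(batch_size, 100))
bce = gluon.loss.SigmoidBinaryCrossEntropyLoss(from_sigmoid=True)  # the discriminator already ends in sigmoid
d_trainer = gluon.Trainer(discriminator.collect_params(), 'adam', {'learning_rate': 0.0002})
g_trainer = gluon.Trainer(generator.collect_params(), 'adam', {'learning_rate': 0.0002})
# 1) Update the discriminator: real -> 1, fake -> 0
with autograd.record():
    fake = generator(noise).detach()  # keep the generator fixed for this step
    d_loss = bce(discriminator(real), nd.ones((batch_size, 1))) + \
             bce(discriminator(fake), nd.zeros((batch_size, 1)))
d_loss.backward()
d_trainer.step(batch_size)
# 2) Update the generator: try to make the discriminator output 1 on fakes
with autograd.record():
    g_loss = bce(discriminator(generator(noise)), nd.ones((batch_size, 1)))
g_loss.backward()
g_trainer.step(batch_size)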