Learn BigDL with Real Code Examples
Updated Nov 24, 2025
Code Sample Descriptions
1
BigDL Simple Neural Network Example
# Minimal BigDL example: a 4-input, 3-class feedforward classifier on Spark.
from bigdl.nn.layer import Sequential, Linear, ReLU, SoftMax
from bigdl.optim.optimizer import SGD, Top1Accuracy
from pyspark.sql import SparkSession

# Spark session that will host the BigDL training job.
spark = SparkSession.builder.appName('BigDLExample').getOrCreate()

# 4 -> 10 -> 3 network: one ReLU hidden layer, SoftMax output.
model = Sequential()
model.add(Linear(4, 10))
model.add(ReLU())
model.add(Linear(10, 3))
model.add(SoftMax())

# Plain SGD; the actual fit call is left as pseudo-code.
optimizer = SGD(model=model, learningrate=0.01)
# optimizer.train(data)  # Replace with actual RDD or DataFrame pipeline
print('Model defined and ready for training on Spark cluster.')
A minimal BigDL example defining a simple feedforward neural network, ready for training on a Spark cluster (the training call itself is left as pseudo-code).
2
BigDL Convolutional Neural Network Example
# CNN for 10-class image classification.
# Assumes 1-channel 28x28 inputs, so a 2x2 pooling step yields 14x14 maps
# (the 32*14*14 flatten size below depends on this -- TODO confirm input size).
from bigdl.nn.layer import Sequential, Conv2D, ReLU, MaxPooling2D, Reshape, Linear, SoftMax
from bigdl.optim.optimizer import SGD

# Define CNN model
model = Sequential()
model.add(Conv2D(1, 32, kernel_size=3, stride=1, padding=1)).add(ReLU())
model.add(MaxPooling2D(pool_size=2, stride=2))
# Flatten the pooled feature maps (32 channels x 14 x 14) before the dense
# layers; without this Reshape, Linear would receive a 4-D tensor it cannot
# consume.
model.add(Reshape([32 * 14 * 14]))
model.add(Linear(32 * 14 * 14, 128)).add(ReLU())
model.add(Linear(128, 10)).add(SoftMax())
# Define optimizer
optimizer = SGD(model=model, learningrate=0.01)
# optimizer.train(data)  # RDD/DataFrame pipeline
print('CNN model ready for image classification.')
Defining a CNN using BigDL for image classification tasks.
3
BigDL RNN Example
# Sequence classifier: an LSTM encoder followed by a 5-way SoftMax head.
from bigdl.nn.layer import Sequential, Recurrent, LSTM, Linear, SoftMax
from bigdl.optim.optimizer import SGD

model = Sequential()
# The Recurrent container unrolls the LSTM cell over the time dimension.
recurrent = Recurrent()
recurrent.add(LSTM(input_size=10, hidden_size=20))
model.add(recurrent)
model.add(Linear(20, 5))
model.add(SoftMax())

# Plain SGD; the fit call itself stays as pseudo-code.
optimizer = SGD(model=model, learningrate=0.01)
# optimizer.train(data)  # Replace with actual sequence RDD
print('RNN model defined and ready for training.')
Building a simple RNN using BigDL for sequence prediction.
4
BigDL Autoencoder Example
# Autoencoder: 20-d input compressed to a 10-d code, then reconstructed.
from bigdl.nn.layer import Sequential, Linear, ReLU
from bigdl.optim.optimizer import SGD

# Encoder (20 -> 10 with ReLU) followed immediately by the decoder (10 -> 20).
model = Sequential().add(Linear(20, 10)).add(ReLU()).add(Linear(10, 20))

# SGD optimizer; the reconstruction training loop is left as pseudo-code.
optimizer = SGD(model=model, learningrate=0.01)
# optimizer.train(data)
print('Autoencoder ready for training.')
Creating a simple autoencoder using BigDL for feature compression.
5
BigDL LSTM Sequence Forecasting
# One-step-ahead forecaster: univariate LSTM with a linear readout.
from bigdl.nn.layer import Sequential, Recurrent, LSTM, Linear
from bigdl.optim.optimizer import SGD

# Single-feature sequences (input_size=1) encoded into a 50-d hidden state.
recurrent = Recurrent().add(LSTM(input_size=1, hidden_size=50))
model = Sequential().add(recurrent).add(Linear(50, 1))

# SGD optimizer; supply a windowed time-series RDD in place of the pseudo-call.
optimizer = SGD(model=model, learningrate=0.01)
# optimizer.train(time_series_data)
print('LSTM model ready for time series forecasting.')
Using LSTM for time series forecasting in BigDL.
6
BigDL Transfer Learning Example
# Transfer learning: reuse a pretrained network and train only a new head.
from bigdl.nn.layer import Model, Sequential, Linear, SoftMax
from bigdl.optim.optimizer import SGD

# Load a pretrained BigDL model snapshot from disk. BigDL's documented API
# for this is Model.load(path); the path here is a placeholder.
pretrained_model = Model.load('resnet50_bigdl')
# Fine-tune by stacking a new 10-class output layer on the 1000-d features.
model = Sequential()
model.add(pretrained_model)
model.add(Linear(1000, 10)).add(SoftMax())
# Smaller learning rate than training from scratch, so the pretrained
# weights are only gently adjusted.
optimizer = SGD(model=model, learningrate=0.001)
# optimizer.train(custom_data)
print('Pretrained model fine-tuned.')
Using a pretrained BigDL model and fine-tuning it for a custom task.
7
BigDL Distributed Training Example
# Distributed training setup: BigDL model definition plus a Spark session.
from bigdl.nn.layer import Sequential, Linear, ReLU, SoftMax
from bigdl.optim.optimizer import SGD
from pyspark.sql import SparkSession

# Spark session that hosts the distributed job.
spark = SparkSession.builder.appName('BigDLDistributed').getOrCreate()

# 10 -> 50 -> 5 classifier.
model = Sequential()
model.add(Linear(10, 50))
model.add(ReLU())
model.add(Linear(50, 5))
model.add(SoftMax())

# use_distributed=True requests training sharded across Spark executors.
optimizer = SGD(model=model, learningrate=0.01, use_distributed=True)
# optimizer.train(distributed_rdd_data)
print('Distributed training setup complete.')
Illustrating distributed training using BigDL with Spark.
8
BigDL Custom Loss Example
# Training with a user-defined criterion (mean squared error).
from bigdl.nn.layer import Sequential, Linear, ReLU, SoftMax
from bigdl.optim.optimizer import SGD
from bigdl.nn.criterion import Criterion


class MyLoss(Criterion):
    """Mean-squared-error criterion: mean((input - target)^2)."""

    def forward(self, input, target):
        # Element-wise squared error, averaged over all elements.
        # NOTE: the class body was previously unindented, which is a
        # SyntaxError in Python; indentation restored here.
        return ((input - target) ** 2).mean()


# Define model
model = Sequential().add(Linear(10, 20)).add(ReLU()).add(Linear(20, 5)).add(SoftMax())
# Optimizer wired to the custom loss
optimizer = SGD(model=model, criterion=MyLoss(), learningrate=0.01)
# optimizer.train(data)
print('Model ready with custom loss.')
Defining a custom loss function in BigDL for training a neural network.
9
BigDL Convolution + LSTM Hybrid Example
# Hybrid network for spatio-temporal data: conv feature extractor + LSTM.
from bigdl.nn.layer import Sequential, Conv2D, ReLU, MaxPooling2D, Recurrent, LSTM, Linear, SoftMax
from bigdl.optim.optimizer import SGD

model = Sequential()
# Spatial feature extraction: 1 -> 16 channels, then 2x2 pooling.
model.add(Conv2D(1, 16, kernel_size=3, stride=1, padding=1))
model.add(ReLU())
model.add(MaxPooling2D(pool_size=2, stride=2))
# Temporal modelling over per-frame features of size 16*14*14
# (assumes 28x28 inputs so pooling yields 14x14 maps -- TODO confirm).
model.add(Recurrent().add(LSTM(16 * 14 * 14, 50)))
model.add(Linear(50, 10))
model.add(SoftMax())

optimizer = SGD(model=model, learningrate=0.01)
# optimizer.train(spatio_temporal_data)
print('Hybrid Conv+LSTM model ready.')
Combining convolutional and LSTM layers for spatio-temporal data in BigDL.
10
BigDL AutoML Pipeline Example
# Sketch of a grid-search ("AutoML") loop around a fixed BigDL model.
from bigdl.nn.layer import Sequential, Linear, ReLU, SoftMax
from bigdl.optim.optimizer import SGD

# Base 10 -> 50 -> 5 classifier.
model = Sequential()
model.add(Linear(10, 50)).add(ReLU())
model.add(Linear(50, 5)).add(SoftMax())

optimizer = SGD(model=model, learningrate=0.01)

# Hyperparameter sweep, left as pseudo-code:
# for hyperparams in hyperparam_grid:
#     optimizer.train(data)
#     evaluate performance
print('AutoML-style BigDL pipeline ready.')
Illustrating an AutoML-style pipeline using BigDL for automated model training.