Learn Keras with Real Code Examples
Updated Nov 24, 2025
Code Sample Descriptions
1
Keras Simple Linear Regression
import numpy as np
from tensorflow import keras
from tensorflow.keras import layers
# Sample data
x_train = np.array([1, 2, 3, 4], dtype=float)
y_train = np.array([2, 4, 6, 8], dtype=float)
# Define model
model = keras.Sequential([layers.Dense(units=1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mean_squared_error')
# Train the model
model.fit(x_train, y_train, epochs=500)
# Predict
y_pred = model.predict(np.array([[10.0]]))
print("Prediction for 10:", y_pred)
A minimal Keras example for linear regression using a single dense layer.
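After training, the fitted parameters can be read back from the layer to confirm the model has learned roughly y = 2x; a small follow-up sketch, continuing from the model above:
# Inspect the learned parameters (should approach slope 2.0 and intercept 0.0)
weights, bias = model.layers[0].get_weights()
print("Learned slope:", weights[0][0])
print("Learned intercept:", bias[0])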
2
Keras Simple Neural Network
import numpy as np
from tensorflow import keras
from tensorflow.keras import layers
# Sample data
x_train = np.random.rand(100,3)
y_train = np.random.randint(0,2,100)
# Define model
model = keras.Sequential([
    layers.Dense(8, activation='relu', input_shape=[3]),
    layers.Dense(1, activation='sigmoid')
])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(x_train, y_train, epochs=50)
Basic feedforward neural network for binary classification.
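A short follow-up sketch of how the trained classifier is typically evaluated and used (here on the same random data, so the numbers carry no meaning):
# Evaluate, then threshold the sigmoid outputs at 0.5 to get class labels
loss, acc = model.evaluate(x_train, y_train, verbose=0)
print("Accuracy:", acc)
probs = model.predict(x_train[:5])
print((probs > 0.5).astype(int).ravel())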
3
Keras MNIST Classifier
from tensorflow import keras
from tensorflow.keras import layers
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
x_train, x_test = x_train/255.0, x_test/255.0
model = keras.Sequential([
    layers.Flatten(input_shape=(28,28)),
    layers.Dense(128, activation='relu'),
    layers.Dense(10, activation='softmax')
])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
model.fit(x_train, y_train, epochs=5)
model.evaluate(x_test, y_test)
Train a simple fully-connected network on MNIST digits.
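To classify a single digit after training, take the argmax over the ten softmax outputs; a brief sketch using the first test image:
import numpy as np
# Predict class probabilities for one image and pick the most likely digit
probs = model.predict(x_test[:1])
print("Predicted digit:", np.argmax(probs, axis=1)[0], "- true label:", y_test[0])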
4
Keras Convolutional Neural Network
from tensorflow import keras
from tensorflow.keras import layers
model = keras.Sequential([
    layers.Conv2D(32, (3,3), activation='relu', input_shape=(28,28,1)),
    layers.MaxPooling2D((2,2)),
    layers.Flatten(),
    layers.Dense(64, activation='relu'),
    layers.Dense(10, activation='softmax')
])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
A simple CNN for image classification.
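The snippet above only defines and compiles the network; a minimal sketch of how it could be trained on MNIST, assuming the data is loaded as in example 3 and given the extra channel axis that Conv2D expects:
import numpy as np
from tensorflow import keras
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
# Scale to [0, 1] and reshape (28, 28) -> (28, 28, 1) for the Conv2D input
x_train = np.expand_dims(x_train / 255.0, -1)
x_test = np.expand_dims(x_test / 255.0, -1)
model.fit(x_train, y_train, epochs=3, validation_data=(x_test, y_test))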
5
Keras LSTM Example
import numpy as np
from tensorflow import keras
from tensorflow.keras import layers
x_train = np.random.rand(100,10,1)
y_train = np.random.rand(100,1)
model = keras.Sequential([
    layers.LSTM(50, input_shape=(10,1)),
    layers.Dense(1)
])
model.compile(optimizer='adam', loss='mse')
model.fit(x_train, y_train, epochs=20)
A simple LSTM network for sequence prediction.
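Once trained, the model returns one value per input sequence of shape (timesteps, features) = (10, 1); a quick sketch on a fresh random sequence:
# Predict for a single new sequence (batch dimension of 1)
new_seq = np.random.rand(1, 10, 1)
print(model.predict(new_seq))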
6
Keras Autoencoder Example
from tensorflow import keras
from tensorflow.keras import layers
input_dim = 20
model = keras.Sequential([
    layers.Dense(10, activation='relu', input_shape=(input_dim,)),
    layers.Dense(5, activation='relu'),
    layers.Dense(10, activation='relu'),
    layers.Dense(input_dim, activation='sigmoid')
])
model.compile(optimizer='adam', loss='mse')
A simple autoencoder for dimensionality reduction.
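The snippet stops at compilation; a sketch (on synthetic data) of how the autoencoder would be trained to reconstruct its input, and how the 5-dimensional bottleneck could be pulled out as a separate encoder model:
import numpy as np
# An autoencoder is trained with the input as its own target
x_train = np.random.rand(1000, input_dim)
model.fit(x_train, x_train, epochs=20, batch_size=32, verbose=0)
# Build an encoder that stops at the bottleneck layer and inspect the compressed codes
encoder = keras.Model(inputs=model.input, outputs=model.layers[1].output)
print(encoder.predict(x_train[:3]).shape)  # (3, 5)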
7
Keras Regression with Multiple Inputs
import numpy as np
from tensorflow import keras
from tensorflow.keras import layers
x_train = np.random.rand(100,3)
y_train = np.dot(x_train, [1.5,-2.0,1.0])+0.5
model = keras.Sequential([layers.Dense(1, input_shape=[3])])
model.compile(optimizer='sgd', loss='mse')
model.fit(x_train, y_train, epochs=100)
Linear regression with multiple features.
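Because the targets come from a known linear rule, the fitted weights can be checked directly against it; a small follow-up sketch:
# The learned kernel should approach [1.5, -2.0, 1.0] and the bias should approach 0.5
weights, bias = model.layers[0].get_weights()
print("Learned coefficients:", weights.ravel())
print("Learned intercept:", bias[0])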
8
Keras Transfer Learning Example
from tensorflow import keras
from tensorflow.keras import layers
base_model = keras.applications.MobileNetV2(input_shape=(128,128,3), include_top=False, weights='imagenet')
base_model.trainable = False
model = keras.Sequential([
    base_model,
    layers.GlobalAveragePooling2D(),
    layers.Dense(10, activation='softmax')
])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
Use a pre-trained MobileNetV2 for image classification.
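The original snippet stops before training because no dataset is attached; as a rough sketch, assuming image batches of shape (N, 128, 128, 3) with integer labels for 10 classes (random placeholders here), and using the MobileNetV2 preprocessing helper:
import numpy as np
# Placeholder data purely to illustrate the call; substitute a real image dataset
x_train = np.random.rand(32, 128, 128, 3).astype("float32") * 255.0
y_train = np.random.randint(0, 10, 32)
# MobileNetV2 expects pixel values rescaled to [-1, 1]
x_train = keras.applications.mobilenet_v2.preprocess_input(x_train)
model.fit(x_train, y_train, epochs=3, batch_size=8)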
9
Keras Custom Callback Example
import numpy as np
from tensorflow import keras
class PrintLossCallback(keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        print(f"Epoch {epoch+1}: loss = {logs['loss']}")
model = keras.Sequential([keras.layers.Dense(1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mse')
model.fit(np.array([1, 2, 3], dtype=float), np.array([2, 4, 6], dtype=float), epochs=5, callbacks=[PrintLossCallback()])
Custom callback to print loss at each epoch.
10
Keras GAN Example
from tensorflow import keras
from tensorflow.keras import layers
# Generator
generator = keras.Sequential([
    layers.Dense(128, activation='relu', input_shape=(100,)),
    layers.Dense(784, activation='sigmoid')
])
# Discriminator
discriminator = keras.Sequential([
    layers.Dense(128, activation='relu', input_shape=(784,)),
    layers.Dense(1, activation='sigmoid')
])
discriminator.compile(optimizer='adam', loss='binary_crossentropy')
Minimal GAN architecture structure in Keras.
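The snippet defines the two networks but no training loop; one common pattern, sketched here with placeholder data, is to chain the generator into a combined model with the discriminator frozen and then alternate the two updates:
import numpy as np
# Combined model: noise -> generator -> (frozen) discriminator, used to update the generator only
discriminator.trainable = False
gan = keras.Sequential([generator, discriminator])
gan.compile(optimizer='adam', loss='binary_crossentropy')
# One illustrative step on placeholder "real" samples (e.g. flattened 28x28 images in [0, 1])
real = np.random.rand(32, 784)
noise = np.random.rand(32, 100)
fake = generator.predict(noise, verbose=0)
# Train the discriminator on real (label 1) vs. generated (label 0) batches ...
discriminator.trainable = True
discriminator.train_on_batch(real, np.ones((32, 1)))
discriminator.train_on_batch(fake, np.zeros((32, 1)))
# ... then train the generator through the frozen discriminator to make fakes look real
discriminator.trainable = False
gan.train_on_batch(noise, np.ones((32, 1)))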