Learn TensorFlow with Real Code Examples
Updated Nov 24, 2025
Code Sample Descriptions
1
TensorFlow Simple Linear Regression
import tensorflow as tf
import numpy as np

# Training data follows y = 2x, so the model should learn weight ~2, bias ~0.
x_train = np.array([1, 2, 3, 4], dtype=float)
y_train = np.array([2, 4, 6, 8], dtype=float)

# A single Dense unit with one input is exactly the linear model y = w*x + b.
model = tf.keras.Sequential([tf.keras.layers.Dense(units=1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mean_squared_error')

# Train the model.
model.fit(x_train, y_train, epochs=500)

# Predict. Modern Keras requires an array/tensor input of shape
# (batch, features) — a bare Python list like [10.0] is rejected.
y_pred = model.predict(np.array([[10.0]]))
print("Prediction for 10:", y_pred)
A minimal TensorFlow example showing linear regression training on sample data.
2
TensorFlow Simple Neural Network
import tensorflow as tf
import numpy as np

# Synthetic training set: 100 samples with 3 features each, binary labels.
x_train = np.random.rand(100, 3)
y_train = np.random.randint(0, 2, 100)

# Build the network layer by layer.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(8, activation='relu', input_shape=[3]))
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))

# Binary cross-entropy is the standard loss for a single sigmoid output.
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Fit on the random data for 50 epochs.
model.fit(x_train, y_train, epochs=50)
A basic feedforward neural network for binary classification.
3
TensorFlow MNIST Example
import tensorflow as tf

# Load the MNIST digit dataset and scale pixel values into [0, 1].
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train / 255.0
x_test = x_test / 255.0

# Flatten each 28x28 image, one hidden layer, softmax over the 10 digits.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))
model.add(tf.keras.layers.Dense(128, activation='relu'))
model.add(tf.keras.layers.Dense(10, activation='softmax'))

# Sparse categorical cross-entropy: labels are integer class ids, not one-hot.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

model.fit(x_train, y_train, epochs=5)
model.evaluate(x_test, y_test)
Train a simple MNIST digit classifier using TensorFlow.
4
TensorFlow Convolutional Neural Network
import tensorflow as tf
from tensorflow.keras import layers, models

# Small CNN: one conv/pool stage followed by a dense classifier head
# producing softmax scores over 10 classes.
model = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Flatten(),
    layers.Dense(64, activation='relu'),
    layers.Dense(10, activation='softmax'),
])
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
A simple CNN for image classification.
5
TensorFlow LSTM Example
import tensorflow as tf
import numpy as np

# 100 random sequences, each 10 timesteps long with 1 feature per step,
# paired with one scalar regression target per sequence.
x_train = np.random.rand(100, 10, 1)
y_train = np.random.rand(100, 1)

# A single LSTM layer feeding one linear output unit.
model = tf.keras.Sequential()
model.add(tf.keras.layers.LSTM(50, input_shape=(10, 1)))
model.add(tf.keras.layers.Dense(1))

model.compile(optimizer='adam', loss='mse')
model.fit(x_train, y_train, epochs=20)
A simple LSTM network for sequence prediction.
6
TensorFlow Autoencoder Example
import tensorflow as tf
from tensorflow.keras import layers, models

input_dim = 20

# Symmetric encoder/decoder stack: 20 -> 10 -> 5 -> 10 -> 20.
# The 5-unit middle layer is the compressed representation.
model = models.Sequential()
model.add(layers.Dense(10, activation='relu', input_shape=(input_dim,)))
model.add(layers.Dense(5, activation='relu'))
model.add(layers.Dense(10, activation='relu'))
model.add(layers.Dense(input_dim, activation='sigmoid'))

# MSE reconstruction loss between input and output.
model.compile(optimizer='adam', loss='mse')
A simple autoencoder for data compression.
7
TensorFlow Regression with Multiple Inputs
import tensorflow as tf
import numpy as np

# 100 samples with 3 features; targets follow a known linear rule plus bias,
# so the single Dense unit can recover the weights [1.5, -2.0, 1.0] and 0.5.
x_train = np.random.rand(100, 3)
y_train = np.dot(x_train, [1.5, -2.0, 1.0]) + 0.5

# One Dense unit over 3 inputs is exactly a multivariate linear model.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(1, input_shape=[3]))

model.compile(optimizer='sgd', loss='mse')
model.fit(x_train, y_train, epochs=100)
Linear regression with multiple input features.
8
TensorFlow Transfer Learning Example
import tensorflow as tf

# ImageNet-pretrained MobileNetV2 backbone without its classifier head.
base_model = tf.keras.applications.MobileNetV2(
    input_shape=(128, 128, 3),
    include_top=False,
    weights='imagenet',
)
# Freeze the pretrained weights so only the new head trains.
base_model.trainable = False

# New head: pool the backbone's feature map and classify into 10 classes.
pooling = tf.keras.layers.GlobalAveragePooling2D()
classifier = tf.keras.layers.Dense(10, activation='softmax')
model = tf.keras.Sequential([base_model, pooling, classifier])

model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
Use a pre-trained MobileNetV2 model for image classification.
9
TensorFlow Custom Callback Example
import tensorflow as tf
import numpy as np


class PrintLossCallback(tf.keras.callbacks.Callback):
    """Print the training loss at the end of every epoch."""

    def on_epoch_end(self, epoch, logs=None):
        # The Keras callback contract passes logs=None by default, and the
        # 'loss' key is not guaranteed — guard instead of indexing directly,
        # which would raise KeyError/TypeError.
        logs = logs or {}
        print(f"Epoch {epoch + 1}: loss = {logs.get('loss')}")


# Tiny y = 2x regression fit just to demonstrate the callback hook.
# Use numpy arrays: some Keras versions reject bare Python lists in fit().
x_train = np.array([1, 2, 3], dtype=float)
y_train = np.array([2, 4, 6], dtype=float)
model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=[1])])
model.compile(optimizer='sgd', loss='mse')
model.fit(x_train, y_train, epochs=5, callbacks=[PrintLossCallback()])
Define a custom callback to print loss after each epoch.
10
TensorFlow GAN Example
import tensorflow as tf
from tensorflow.keras import layers

# Generator: maps a 100-dim noise vector to a flat 784-value (28x28) image.
generator = tf.keras.Sequential()
generator.add(layers.Dense(128, activation='relu', input_shape=(100,)))
generator.add(layers.Dense(784, activation='sigmoid'))

# Discriminator: scores a flat 784-value image as real (1) or fake (0).
discriminator = tf.keras.Sequential()
discriminator.add(layers.Dense(128, activation='relu', input_shape=(784,)))
discriminator.add(layers.Dense(1, activation='sigmoid'))

# Only the discriminator is compiled here; a complete GAN would also wire
# up and compile the combined generator + discriminator model.
discriminator.compile(optimizer='adam', loss='binary_crossentropy')
A minimal GAN example structure.