import matplotlib.pyplot as plt
import torch
from torch import nn
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.datasets import MNIST  # Training dataset
from torchvision.utils import make_grid  # restored: this import had been fused into the comment above
from tqdm.auto import tqdm
torch.manual_seed(0)  # Set for testing purposes, please do not change!


def show_tensor_images(image_tensor, num_images=25, size=(1, 28, 28)):
    '''
    Function for visualizing images: Given a tensor of images, number of images, and
    size per image, plots and prints the images in a uniform grid.
    '''
    # Detach from the autograd graph and move to CPU before reshaping for plotting.
    image_unflat = image_tensor.detach().cpu().view(-1, *size)
    image_grid = make_grid(image_unflat[:num_images], nrow=5)
    plt.imshow(image_grid.permute(1, 2, 0).squeeze())
    plt.show()


# UNQ_C1 (UNIQUE CELL IDENTIFIER, DO NOT EDIT)
# GRADED FUNCTION: get_generator_block
def get_generator_block(input_dim, output_dim):
    '''
    Function for returning a block of the generator's neural network
    given input and output dimensions.
    Parameters:
        input_dim: the dimension of the input vector, a scalar
        output_dim: the dimension of the output vector, a scalar
    Returns:
        a generator neural network layer, with a linear transformation
        followed by a batch normalization and then a relu activation
    '''
    return nn.Sequential(
        # Hint: Replace all of the "None" with the appropriate dimensions.
        # The documentation may be useful if you're less familiar with PyTorch:
        # https://pytorch.org/docs/stable/nn.html.
        #### START CODE HERE ####
        nn.Linear(input_dim, output_dim),
        nn.BatchNorm1d(output_dim),
        nn.ReLU(inplace=True),
        #### END CODE HERE ####
    )
# Verify the generator block function
def test_gen_block(in_features, out_features, num_test=1000):
    '''
    Sanity-check get_generator_block: verifies the three layer types,
    the output shape, and that batch norm keeps the output std in the
    expected band.
    '''
    block = get_generator_block(in_features, out_features)

    # Check the three parts
    assert len(block) == 3
    assert type(block[0]) == nn.Linear
    assert type(block[1]) == nn.BatchNorm1d
    assert type(block[2]) == nn.ReLU

    # Check the output shape
    test_input = torch.randn(num_test, in_features)
    test_output = block(test_input)
    assert tuple(test_output.shape) == (num_test, out_features)
    # BatchNorm normalizes to unit variance and ReLU zeroes the negative half,
    # so the post-activation std should land near 0.58.
    assert test_output.std() > 0.55
    assert test_output.std() < 0.65


test_gen_block(25, 12)
test_gen_block(15, 28)
print("Success!")
# UNQ_C2 (UNIQUE CELL IDENTIFIER, DO NOT EDIT)
# GRADED FUNCTION: Generator
class Generator(nn.Module):
    '''
    Generator Class
    Values:
        z_dim: the dimension of the noise vector, a scalar
        im_dim: the dimension of the images, fitted for the dataset used, a scalar
          (MNIST images are 28 x 28 = 784 so that is your default)
        hidden_dim: the inner dimension, a scalar
    '''
    def __init__(self, z_dim=10, im_dim=784, hidden_dim=128):
        super(Generator, self).__init__()
        # Build the neural network
        self.gen = nn.Sequential(
            get_generator_block(z_dim, hidden_dim),
            get_generator_block(hidden_dim, hidden_dim * 2),
            get_generator_block(hidden_dim * 2, hidden_dim * 4),
            get_generator_block(hidden_dim * 4, hidden_dim * 8),
            # There is a dropdown with hints if you need them!
            #### START CODE HERE ####
            nn.Linear(hidden_dim * 8, im_dim),
            nn.Sigmoid(),  # squashes outputs into (0, 1) to match normalized pixel values
            #### END CODE HERE ####
        )

    def forward(self, noise):
        '''
        Function for completing a forward pass of the generator: Given a noise tensor,
        returns generated images.
        Parameters:
            noise: a noise tensor with dimensions (n_samples, z_dim)
        '''
        return self.gen(noise)

    # Needed for grading
    def get_gen(self):
        '''
        Returns:
            the sequential model
        '''
        return self.gen
# Verify the generator class
def test_generator(z_dim, im_dim, hidden_dim, num_test=10000):
    '''
    Sanity-check Generator: verifies the sequential part has six modules,
    the output shape matches (num_test, im_dim), and outputs lie in the
    sigmoid range (0, 1).
    '''
    gen = Generator(z_dim, im_dim, hidden_dim).get_gen()

    # Check there are six modules in the sequential part
    assert len(gen) == 6

    test_input = torch.randn(num_test, z_dim)
    test_output = gen(test_input)

    # Check that the output shape is correct
    assert tuple(test_output.shape) == (num_test, im_dim)
    assert test_output.max() < 1, "Make sure to use a sigmoid"
    assert test_output.min() > 0