# 2D卷积层的实现 (implementation of a 2-D convolutional layer)
import torch
from torch import nn
def corr2d(X, K):
    """Compute the 2-D cross-correlation of input X with kernel K.

    Args:
        X: 2-D input tensor of shape (H, W).
        K: 2-D kernel tensor of shape (h, w), with h <= H and w <= W.

    Returns:
        Tensor of shape (H - h + 1, W - w + 1) where each entry is the sum
        of the elementwise product of K with the matching window of X.
    """
    h, w = K.shape
    Y = torch.zeros(size=(X.shape[0] - h + 1, X.shape[1] - w + 1))
    for i in range(Y.shape[0]):
        for j in range(Y.shape[1]):
            # Tuple indexing (Y[i, j]) instead of chained Y[i][j]:
            # one indexing op, the idiomatic tensor form.
            Y[i, j] = (X[i:i + h, j:j + w] * K).sum()
    return Y
class conv2d(nn.Module):
    """A minimal 2-D convolution layer built on the module-level corr2d helper.

    Holds a learnable kernel and a scalar bias; forward applies
    cross-correlation followed by the bias shift.

    NOTE(review): the parameter is spelled `kernal_size` in the original
    API; the spelling is kept so existing keyword callers still work.
    """

    def __init__(self, kernal_size):
        super().__init__()
        # Kernel starts uniform-random in [0, 1); bias starts at zero.
        self.weight = nn.Parameter(torch.rand(size=kernal_size))
        self.bias = nn.Parameter(torch.zeros(1))

    def forward(self, x):
        # Cross-correlate the input with the learned kernel, then add the bias.
        return corr2d(x, self.weight) + self.bias
# Build a 6x8 input that is 1 everywhere except a 0 band in columns 2..5.
X = torch.ones((6, 8))
X[:, 2:6] = 0
# Edge-detector kernel: responds at the 1->0 and 0->1 column transitions.
K = torch.tensor([[1.0, -1.0]])
# Target output. This line was missing in the original: Y was reshaped and
# used in the loss below without ever being defined at module level.
Y = corr2d(X, K)

# 学习由X生成Y的卷积核 — learn the kernel mapping X to Y with a built-in
# Conv2d. Named `net` so it no longer shadows the conv2d class above.
net = nn.Conv2d(1, 1, kernel_size=(1, 2), bias=False)
# Conv2d expects (batch, channels, height, width).
X = X.reshape((1, 1, 6, 8))
Y = Y.reshape((1, 1, 6, 7))
lr = 3e-2  # learning rate
for i in range(10):
    net.zero_grad()  # clear gradients from the previous step
    y_hat = net(X)
    l = (y_hat - Y) ** 2  # elementwise squared error
    l.sum().backward()
    # 梯度下降 — plain gradient-descent update on the kernel weights
    # (dropped the no-op `/ 1` from the original update line).
    net.weight.data[:] -= lr * net.weight.grad
    print(f' epoch = {i}, loss = {l.sum():.3f}')
# 运行结果 (sample run output):
# epoch = 0, loss = 26.152
# epoch = 1, loss = 10.717
# epoch = 2, loss = 4.393
# epoch = 3, loss = 1.802
# epoch = 4, loss = 0.739
# epoch = 5, loss = 0.304
# epoch = 6, loss = 0.125
# epoch = 7, loss = 0.052
# epoch = 8, loss = 0.021
# epoch = 9, loss = 0.009
# conv2d.weight.data[:].reshape((1, 2)) -> tensor([[ 0.9809, -0.9851]])