I have the following program:
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F

class Net(nn.Module):
    def __init__(self, input_size, output_size):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(input_size, 32)
        self.fc2 = nn.Linear(32, 32)
        self.fc3 = nn.Linear(32, output_size)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return x

net = Net(input_size=10, output_size=1)

# Forward pass on a batch of two inputs.
x = torch.from_numpy(np.random.rand(2, 10)).type(torch.FloatTensor)
y = net(x)

# Forward pass on the first input alone, without the batch dimension.
x0 = x[0,]
y0 = net(x0)

print(y[0,], y0)
assert y0.item() == y[0,].item(), "Outputs not equal"
Why does the network return two slightly different outputs for the same input, depending on whether it is fed as part of a batch or on its own? The difference is tiny, but I am curious why it happens at all.

I expected the outputs to be exactly identical, since all of the computations involved are deterministic!
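
For what it's worth, here is a minimal sketch of how I quantify the gap, reusing `y` and `y0` from the program above. The assert passes if I compare within a tolerance via `torch.allclose` instead of demanding bit-exact equality, which suggests the discrepancy is on the order of float32 rounding:

# Measure how far apart the two outputs actually are.
diff = (y[0,] - y0).abs().item()
print(f"absolute difference: {diff:.3e}")   # tiny, around float32 rounding error

# Compare within a tolerance rather than exactly.
print(torch.allclose(y[0,], y0, atol=1e-6)) # True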