Example:
import torch
import torch.nn as nn
import torch.nn.functional as F

class FCC(nn.Module):
    def __init__(self, input_dim, hidden_dim, output_dim):
        super(FCC, self).__init__()
        self.linear1 = nn.Linear(input_dim, hidden_dim)
        self.linear2 = nn.Linear(hidden_dim, output_dim)
        self.Dropout = nn.Dropout(p=0.8)  # module form: follows the module's training mode automatically
        self.dropout = 0.8                # drop probability for the functional form
        self.training = True              # redundant: nn.Module already sets this (toggled by train()/eval())

    def forward(self, input):
        print("input = ", input)
        input = F.dropout(input, self.dropout, self.training)  # functional form needs the training flag passed in
        print("input1 = ", input)
        out = self.linear1(input)
        out = F.dropout(out, self.dropout, self.training)
        print("out1 = ", out)
        out = self.linear2(out)
        out = self.Dropout(out)
        print("out2 = ", out)
        return out

input = torch.randint(1, 4, (5, 4)).float()  # randint returns integers; nn.Linear expects a float tensor
model = FCC(4, 3, 2)
model(input)
Output:
input =  tensor([[2., 3., 1., 1.],
        [2., 1., 1., 2.],
        [1., 1., 1., 3.],
        [2., 3., 1., 3.],
        [3., 1., 1., 3.]])
input1 =  tensor([[0., 0., 0., 0.],
        [0., 0., 0., 0.],
        [0., 0., 0., 0.],
        [0., 0., 0., 0.],
        [0., 5., 0., 0.]])
out1 =  tensor([[ 0.0000, -0.0000,  0.0000],
        [ 0.8460, -0.0000,  0.0000],
        [ 0.0000, -0.0000,  1.0678],
        [ 0.8460, -0.0000,  0.0000],
        [ 0.0000,  0.0000,  0.0000]], grad_fn=<DropoutBackward>)
out2 =  tensor([[ 2.6848, -0.0000],
        [ 0.0000, -0.0000],
        [-0.0000, -1.7823],
        [ 0.0000, -0.0000],
        [ 0.0000, -0.0000]], grad_fn=<DropoutBackward>)